diff --git a/.travis.yml b/.travis.yml index a1bbb8a884f..996e5ec07b5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,4 @@ -language: rust +language: minimal sudo: required dist: trusty services: @@ -20,7 +20,7 @@ matrix: - env: IMAGE=x86_64-gnu-cargotest - env: IMAGE=x86_64-gnu-debug - env: IMAGE=x86_64-gnu-nopt - - env: IMAGE=x86_64-gnu-rustbuild + - env: IMAGE=x86_64-gnu-make - env: IMAGE=x86_64-gnu-llvm-3.7 ALLOW_PR=1 RUST_BACKTRACE=1 - env: IMAGE=x86_64-musl @@ -39,7 +39,7 @@ matrix: install: brew install ccache - env: > RUST_CHECK_TARGET=check - RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin --enable-rustbuild + RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin --disable-rustbuild SRC=. os: osx install: brew install ccache @@ -51,17 +51,16 @@ matrix: install: brew install ccache script: - - if [ -z "$ALLOW_PR" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then - echo skipping, not a full build; - elif [ -z "$ENABLE_AUTO" ] then - echo skipping, not quite ready yet - elif [ "$TRAVIS_OS_NAME" = "osx" ]; then - git submodule update --init; - src/ci/run.sh; - else - git submodule update --init; - src/ci/docker/run.sh $IMAGE; - fi + - > + if [ "$ALLOW_PR" = "" ] && [ "$TRAVIS_BRANCH" != "auto" ]; then + echo skipping, not a full build; + elif [ "$TRAVIS_OS_NAME" = "osx" ]; then + git submodule update --init; + src/ci/run.sh; + else + git submodule update --init; + src/ci/docker/run.sh $IMAGE; + fi # Save tagged docker images we created and load them if they're available before_cache: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4c0f93c3703..20a0bd2e256 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -86,13 +86,17 @@ benchmarks, generate documentation, install a fresh build of Rust, and more. It's your best friend when working on Rust, allowing you to compile & test your contributions before submission. -All the configuration for the build system lives in [the `mk` directory][mkdir] -in the project root. It can be hard to follow in places, as it uses some -advanced Make features which make for some challenging reading. If you have -questions on the build system internals, try asking in -[`#rust-internals`][pound-rust-internals]. +The build system lives in [the `src/bootstrap` directory][bootstrap] in the +project root. Our build system is itself written in Rust and is based on Cargo +to actually build all the compiler's crates. If you have questions on the build +system internals, try asking in [`#rust-internals`][pound-rust-internals]. -[mkdir]: https://github.com/rust-lang/rust/tree/master/mk/ +[bootstrap]: https://github.com/rust-lang/rust/tree/master/src/bootstrap/ + +> **Note**: the build system was recently rewritten from a jungle of makefiles +> to the current incarnation you'll see in `src/bootstrap`. If you experience +> bugs you can temporarily revert back to the makefiles with +> `--disable-rustbuild` passed to `./configure`. ### Configuration @@ -119,42 +123,111 @@ configuration used later in the build process. Some options to note: To see a full list of options, run `./configure --help`. -### Useful Targets +### Building -Some common make targets are: +Although the `./configure` script will generate a `Makefile`, this is actually +just a thin veneer over the actual build system driver, `x.py`. This file, at +the root of the repository, is used to build, test, and document various parts +of the compiler. You can execute it as: -- `make tips` - show useful targets, variables and other tips for working with - the build system. 
-- `make rustc-stage1` - build up to (and including) the first stage. For most
-  cases we don't need to build the stage2 compiler, so we can save time by not
-  building it. The stage1 compiler is a fully functioning compiler and
-  (probably) will be enough to determine if your change works as expected.
-- `make $host/stage1/bin/rustc` - Where $host is a target triple like x86_64-unknown-linux-gnu.
-  This will build just rustc, without libstd. This is the fastest way to recompile after
-  you changed only rustc source code. Note however that the resulting rustc binary
-  won't have a stdlib to link against by default. You can build libstd once with
-  `make rustc-stage1`, rustc will pick it up afterwards. libstd is only guaranteed to
-  work if recompiled, so if there are any issues recompile it.
-- `make check` - build the full compiler & run all tests (takes a while). This
+```sh
+python x.py build
+```
+
+On some systems you can also use the shorter version:
+
+```sh
+./x.py build
+```
+
+To learn more about the driver and top-level targets, you can execute:
+
+```sh
+python x.py --help
+```
+
+The general format for the driver script is:
+
+```sh
+python x.py <command> [<directory>]
+```
+
+Some example commands are `build`, `test`, and `doc`. These will build, test,
+and document the specified directory. The second argument, `<directory>`, is
+optional and defaults to working over the entire compiler. If specified,
+however, only that specific directory will be built. For example:
+
+```sh
+# build the entire compiler
+python x.py build
+
+# build all documentation
+python x.py doc
+
+# run all test suites
+python x.py test
+
+# build only the standard library
+python x.py build src/libstd
+
+# test only one particular test suite
+python x.py test src/test/rustdoc
+
+# build only the stage0 libcore library
+python x.py build src/libcore --stage 0
+```
+
+You can explore the build system through the various `--help` pages for each
+subcommand. For example to learn more about a command you can run:
+
+```
+python x.py build --help
+```
+
+To learn about all possible rules you can execute, run:
+
+```
+python x.py build --help --verbose
+```
+
+### Useful commands
+
+Some common invocations of `x.py` are:
+
+- `x.py build --help` - show the help message and explain the subcommand
+- `x.py build src/libtest --stage 1` - build up to (and including) the first
+  stage. For most cases we don't need to build the stage2 compiler, so we can
+  save time by not building it. The stage1 compiler is a fully functioning
+  compiler and (probably) will be enough to determine if your change works as
+  expected.
+- `x.py build src/rustc --stage 1` - This will build just rustc, without libstd.
+  This is the fastest way to recompile after you changed only rustc source code.
+  Note however that the resulting rustc binary won't have a stdlib to link
+  against by default. You can build libstd once with `x.py build src/libstd`,
+  but it is only guaranteed to work if recompiled, so if there are any issues
+  recompile it.
+- `x.py test` - build the full compiler & run all tests (takes a while). This
   is what gets run by the continuous integration system against your pull
   request. You should run this before submitting to make sure your tests pass &
   everything builds in the correct manner.
-- `make check-stage1-std NO_REBUILD=1` - test the standard library without
-  rebuilding the entire compiler
-- `make check TESTNAME=<substring of test name>` - Run a matching set of tests.
+- `x.py test src/libstd --stage 1` - test the standard library without
+  recompiling stage 2.
+- `x.py test src/test/run-pass --filter TESTNAME` - Run a matching set of tests.
   - `TESTNAME` should be a substring of the tests to match against e.g. it could
     be the fully qualified test name, or just a part of it.
     `TESTNAME=collections::hash::map::test_map::test_capacity_not_less_than_len`
     or `TESTNAME=test_capacity_not_less_than_len`.
-- `make check-stage1-rpass TESTNAME=<substring of test name>` - Run a single
-  rpass test with the stage1 compiler (this will be quicker than running the
-  command above as we only build the stage1 compiler, not the entire thing).
-  You can also leave off the `-rpass` to run all stage1 test types.
-- `make check-stage1-coretest` - Run stage1 tests in `libcore`.
-- `make tidy` - Check that the source code is in compliance with Rust's style
-  guidelines. There is no official document describing Rust's full guidelines
-  as of yet, but basic rules like 4 spaces for indentation and no more than 99
-  characters in a single line should be kept in mind when writing code.
+- `x.py test src/test/run-pass --stage 1 --filter <substring of test name>` -
+  Run a single rpass test with the stage1 compiler (this will be quicker than
+  running the command above as we only build the stage1 compiler, not the entire
+  thing). You can also leave off the directory argument to run all stage1 test
+  types.
+- `x.py test src/libcore --stage 1` - Run stage1 tests in `libcore`.
+- `x.py test src/tools/tidy` - Check that the source code is in compliance with
+  Rust's style guidelines. There is no official document describing Rust's full
+  guidelines as of yet, but basic rules like 4 spaces for indentation and no
+  more than 99 characters in a single line should be kept in mind when writing
+  code.
 
 ## Pull Requests
 
@@ -172,19 +245,17 @@ amount of time you have to wait.
 
 You need to have built the compiler at least once before running these will
 work, but that’s only one full build rather than one each time.
 
-    $ make -j8 rustc-stage1 && make check-stage1
+    $ python x.py test --stage 1
 
 is one such example, which builds just `rustc`, and then runs the tests. If
 you’re adding something to the standard library, try
 
-    $ make -j8 check-stage1-std NO_REBUILD=1
-
-This will not rebuild the compiler, but will run the tests.
+    $ python x.py test src/libstd --stage 1
 
 Please make sure your pull request is in compliance with Rust's style
 guidelines by running
 
-    $ make tidy
+    $ python x.py test src/tools/tidy
 
 Make this check before every pull request (and every new commit in a pull
 request); you can add [git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
 
diff --git a/README.md b/README.md
index 7360651095b..2133b17de0f 100644
--- a/README.md
+++ b/README.md
@@ -36,16 +36,14 @@ Read ["Installing Rust"] from [The Book].
 
    ```sh
    $ ./configure
-   $ make && make install
+   $ make && sudo make install
    ```
 
-   > ***Note:*** You may need to use `sudo make install` if you do not
-   > normally have permission to modify the destination directory. The
-   > install locations can be adjusted by passing a `--prefix` argument
-   > to `configure`. Various other options are also supported – pass
+   > ***Note:*** Install locations can be adjusted by passing a `--prefix`
+   > argument to `configure`. Various other options are also supported – pass
    > `--help` for more information on them.
 
-   When complete, `make install` will place several programs into
+   When complete, `sudo make install` will place several programs into
    `/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
    API-documentation tool.
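As a concrete illustration of the `--prefix` note above, pointing `configure` at a user-writable prefix removes the need for `sudo make install` entirely; the prefix path below is only an example, not something this patch prescribes:

```sh
# Configure an install prefix the current user can write to (example path),
# then build and install without sudo.
./configure --prefix="$HOME/rust-install"
make && make install

# The installed tools end up under the chosen prefix instead of /usr/local/bin.
"$HOME/rust-install/bin/rustc" --version
```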
This install does not include [Cargo], Rust's package manager, which you may also want to build. @@ -108,30 +106,22 @@ MSVC builds of Rust additionally require an installation of Visual Studio 2013 (or later) so `rustc` can use its linker. Make sure to check the “C++ tools” option. -With these dependencies installed, the build takes two steps: +With these dependencies installed, you can build the compiler in a `cmd.exe` +shell with: ```sh -$ ./configure +> python x.py build +``` + +If you're running inside of an msys shell, however, you can run: + +```sh +$ ./configure --build=x86_64-pc-windows-msvc $ make && make install ``` -#### MSVC with rustbuild - -The old build system, based on makefiles, is currently being rewritten into a -Rust-based build system called rustbuild. This can be used to bootstrap the -compiler on MSVC without needing to install MSYS or MinGW. All you need are -[Python 2](https://www.python.org/downloads/), -[CMake](https://cmake.org/download/), and -[Git](https://git-scm.com/downloads) in your PATH (make sure you do not use the -ones from MSYS if you have it installed). You'll also need Visual Studio 2013 or -newer with the C++ tools. Then all you need to do is to kick off rustbuild. - -``` -python x.py build -``` - -Currently rustbuild only works with some known versions of Visual Studio. If you -have a more recent version installed that a part of rustbuild doesn't understand +Currently building Rust only works with some known versions of Visual Studio. If +you have a more recent version installed the build system doesn't understand then you may need to force rustbuild to use an older version. This can be done by manually calling the appropriate vcvars file before running the bootstrap. @@ -149,16 +139,6 @@ $ ./configure $ make docs ``` -Building the documentation requires building the compiler, so the above -details will apply. Once you have the compiler built, you can - -```sh -$ make docs NO_REBUILD=1 -``` - -To make sure you don’t re-build the compiler because you made a change -to some documentation. - The generated documentation will appear in a top-level `doc` directory, created by the `make` rule. diff --git a/appveyor.yml b/appveyor.yml index 686c48abb30..bf75439b74a 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -2,25 +2,22 @@ environment: matrix: # 32/64 bit MSVC - MSYS_BITS: 64 - TARGET: x86_64-pc-windows-msvc - CHECK: check - CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions + RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc + RUST_CHECK_TARGET: check - MSYS_BITS: 32 - TARGET: i686-pc-windows-msvc - CHECK: check - CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions + RUST_CONFIGURE_ARGS: --build=i686-pc-windows-msvc + RUST_CHECK_TARGET: check - # MSVC rustbuild + # MSVC makefiles - MSYS_BITS: 64 - CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions - TARGET: x86_64-pc-windows-msvc - CHECK: check + RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --disable-rustbuild + RUST_CHECK_TARGET: check # MSVC cargotest - MSYS_BITS: 64 - CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions - TARGET: x86_64-pc-windows-msvc - CHECK: check-cargotest + NO_VENDOR: 1 + RUST_CHECK_TARGET: check-cargotest + RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc # 32/64-bit MinGW builds. # @@ -47,24 +44,22 @@ environment: # *not* use debug assertions and llvm assertions. This is because they take # too long on appveyor and this is tested by rustbuild below. 
- MSYS_BITS: 32 - TARGET: i686-pc-windows-gnu - CHECK: check + RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu + RUST_CHECK_TARGET: check MINGW_URL: https://s3.amazonaws.com/rust-lang-ci MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z MINGW_DIR: mingw32 - MSYS_BITS: 32 - CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions - TARGET: i686-pc-windows-gnu - CHECK: check + RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --disable-rustbuild + RUST_CHECK_TARGET: check MINGW_URL: https://s3.amazonaws.com/rust-lang-ci MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z MINGW_DIR: mingw32 - MSYS_BITS: 64 - CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions - TARGET: x86_64-pc-windows-gnu - CHECK: check + RUST_CHECK_TARGET: check + RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu MINGW_URL: https://s3.amazonaws.com/rust-lang-ci MINGW_ARCHIVE: x86_64-4.9.2-release-win32-seh-rt_v4-rev4.7z MINGW_DIR: mingw64 @@ -90,15 +85,20 @@ install: - if NOT defined MINGW_URL set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH% test_script: - - sh ./configure - %CONFIGURE_ARGS% - --build=%TARGET% - - bash -c "make -j$(nproc)" - - bash -c "make %CHECK% -j$(nproc)" + - git submodule update --init + - set SRC=. + - set NO_CCACHE=1 + - sh src/ci/run.sh cache: - - build/%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger - - "%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "build/i686-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "build/x86_64-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "build/i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "build/x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "i686-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "x86_64-pc-windows-gnu/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "i686-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger" + - "x86_64-pc-windows-msvc/llvm -> src/rustllvm/llvm-auto-clean-trigger" branches: only: diff --git a/configure b/configure index 5311bf4b064..0a07e41fb97 100755 --- a/configure +++ b/configure @@ -631,7 +631,7 @@ opt stage0-landing-pads 1 "enable landing pads during bootstrap with stage0" opt dist-host-only 0 "only install bins for the host architecture" opt inject-std-version 1 "inject the current compiler version of libstd into programs" opt llvm-version-check 1 "check if the LLVM version is supported, build anyway" -opt rustbuild 0 "use the rust and cargo based build system" +opt rustbuild 1 "use the rust and cargo based build system" opt codegen-tests 1 "run the src/test/codegen tests" opt option-checking 1 "complain about unrecognized options in this configure script" opt ninja 0 "build LLVM using the Ninja generator (for MSVC, requires building in the correct environment)" @@ -664,11 +664,11 @@ valopt armv7-linux-androideabi-ndk "" "armv7-linux-androideabi NDK standalone pa valopt aarch64-linux-android-ndk "" "aarch64-linux-android NDK standalone path" valopt nacl-cross-path "" "NaCl SDK path (Pepper Canary is recommended). Must be absolute!" 
valopt musl-root "/usr/local" "MUSL root installation directory (deprecated)" -valopt musl-root-x86_64 "/usr/local" "x86_64-unknown-linux-musl install directory" -valopt musl-root-i686 "/usr/local" "i686-unknown-linux-musl install directory" -valopt musl-root-arm "/usr/local" "arm-unknown-linux-musleabi install directory" -valopt musl-root-armhf "/usr/local" "arm-unknown-linux-musleabihf install directory" -valopt musl-root-armv7 "/usr/local" "armv7-unknown-linux-musleabihf install directory" +valopt musl-root-x86_64 "" "x86_64-unknown-linux-musl install directory" +valopt musl-root-i686 "" "i686-unknown-linux-musl install directory" +valopt musl-root-arm "" "arm-unknown-linux-musleabi install directory" +valopt musl-root-armhf "" "arm-unknown-linux-musleabihf install directory" +valopt musl-root-armv7 "" "armv7-unknown-linux-musleabihf install directory" valopt extra-filename "" "Additional data that is hashed and passed to the -C extra-filename flag" if [ -e ${CFG_SRC_DIR}.git ] @@ -848,7 +848,10 @@ then fi # For building LLVM -probe_need CFG_CMAKE cmake +if [ -z "$CFG_LLVM_ROOT" ] +then + probe_need CFG_CMAKE cmake +fi # On MacOS X, invoking `javac` pops up a dialog if the JDK is not # installed. Since `javac` is only used if `antlr4` is available, @@ -1371,7 +1374,7 @@ then fi fi -if [ -z "$CFG_ENABLE_RUSTBUILD" ]; then +if [ -n "$CFG_DISABLE_RUSTBUILD" ]; then step_msg "making directories" @@ -1471,7 +1474,7 @@ fi step_msg "configuring submodules" # Have to be in the top of src directory for this -if [ -z $CFG_DISABLE_MANAGE_SUBMODULES ] && [ -z $CFG_ENABLE_RUSTBUILD ] +if [ -z "$CFG_DISABLE_MANAGE_SUBMODULES" ] && [ -n "$CFG_DISABLE_RUSTBUILD" ] then cd ${CFG_SRC_DIR} @@ -1543,11 +1546,11 @@ do ;; esac - if [ -n "$CFG_ENABLE_RUSTBUILD" ] + if [ -z "$CFG_DISABLE_RUSTBUILD" ] then msg "not configuring LLVM, rustbuild in use" do_reconfigure=0 - elif [ -z $CFG_LLVM_ROOT ] + elif [ -z "$CFG_LLVM_ROOT" ] then LLVM_BUILD_DIR=${CFG_BUILD_DIR}$t/llvm LLVM_INST_DIR=$LLVM_BUILD_DIR @@ -1868,7 +1871,7 @@ do putvar $CFG_LLVM_INST_DIR done -if [ -n "$CFG_ENABLE_RUSTBUILD" ] +if [ -z "$CFG_DISABLE_RUSTBUILD" ] then INPUT_MAKEFILE=src/bootstrap/mk/Makefile.in else @@ -1887,5 +1890,28 @@ else step_msg "complete" fi -msg "run \`make help\`" +if [ "$CFG_SRC_DIR" = `pwd` ]; then + X_PY=x.py +else + X_PY=${CFG_SRC_DIR_RELATIVE}x.py +fi + +if [ -z "$CFG_DISABLE_RUSTBUILD" ]; then + msg "NOTE you have now configured rust to use a rewritten build system" + msg " called rustbuild, and as a result this may have bugs that " + msg " you did not see before. If you experience any issues you can" + msg " go back to the old build system with --disable-rustbuild and" + msg " please feel free to report any bugs!" + msg "" + msg "run \`python ${X_PY} --help\`" +else + warn "the makefile-based build system is deprecated in favor of rustbuild" + msg "" + msg "It is recommended you avoid passing --disable-rustbuild to get your" + msg "build working as the makefiles will be deleted on 2017-02-02. 
If you" + msg "encounter bugs with rustbuild please file issues against rust-lang/rust" + msg "" + msg "run \`make help\`" +fi + msg diff --git a/mk/cfg/i686-unknown-openbsd.mk b/mk/cfg/i686-unknown-openbsd.mk new file mode 100644 index 00000000000..b839937c976 --- /dev/null +++ b/mk/cfg/i686-unknown-openbsd.mk @@ -0,0 +1,24 @@ +# i686-unknown-openbsd configuration +CC_i686-unknown-openbsd=$(CC) +CXX_i686-unknown-openbsd=$(CXX) +CPP_i686-unknown-openbsd=$(CPP) +AR_i686-unknown-openbsd=$(AR) +CFG_LIB_NAME_i686-unknown-openbsd=lib$(1).so +CFG_STATIC_LIB_NAME_i686-unknown-openbsd=lib$(1).a +CFG_LIB_GLOB_i686-unknown-openbsd=lib$(1)-*.so +CFG_LIB_DSYM_GLOB_i686-unknown-openbsd=$(1)-*.dylib.dSYM +CFG_JEMALLOC_CFLAGS_i686-unknown-openbsd := -m32 -I/usr/include $(CFLAGS) +CFG_GCCISH_CFLAGS_i686-unknown-openbsd := -g -fPIC -m32 -I/usr/include $(CFLAGS) +CFG_GCCISH_LINK_FLAGS_i686-unknown-openbsd := -shared -fPIC -g -pthread -m32 +CFG_GCCISH_DEF_FLAG_i686-unknown-openbsd := -Wl,--export-dynamic,--dynamic-list= +CFG_LLC_FLAGS_i686-unknown-openbsd := +CFG_INSTALL_NAME_i686-unknown-openbsd = +CFG_EXE_SUFFIX_i686-unknown-openbsd := +CFG_WINDOWSY_i686-unknown-openbsd := +CFG_UNIXY_i686-unknown-openbsd := 1 +CFG_LDPATH_i686-unknown-openbsd := +CFG_RUN_i686-unknown-openbsd=$(2) +CFG_RUN_TARG_i686-unknown-openbsd=$(call CFG_RUN_i686-unknown-openbsd,,$(2)) +CFG_GNU_TRIPLE_i686-unknown-openbsd := i686-unknown-openbsd +RUSTC_FLAGS_i686-unknown-openbsd=-C linker=$(call FIND_COMPILER,$(CC)) +CFG_DISABLE_JEMALLOC_i686-unknown-openbsd := 1 diff --git a/mk/crates.mk b/mk/crates.mk index 7ae5846c54b..79df941aeb3 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -52,7 +52,7 @@ TARGET_CRATES := libc std term \ getopts collections test rand \ compiler_builtins core alloc \ - rustc_unicode rustc_bitflags \ + std_unicode rustc_bitflags \ alloc_system alloc_jemalloc \ panic_abort panic_unwind unwind RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_driver \ @@ -65,27 +65,23 @@ HOST_CRATES := syntax syntax_ext proc_macro_tokens proc_macro_plugin syntax_pos TOOLS := compiletest rustdoc rustc rustbook error_index_generator DEPS_core := -DEPS_compiler_builtins := core +DEPS_compiler_builtins := core native:compiler-rt DEPS_alloc := core libc alloc_system DEPS_alloc_system := core libc DEPS_alloc_jemalloc := core libc native:jemalloc -DEPS_collections := core alloc rustc_unicode +DEPS_collections := core alloc std_unicode DEPS_libc := core DEPS_rand := core DEPS_rustc_bitflags := core -DEPS_rustc_unicode := core +DEPS_std_unicode := core DEPS_panic_abort := libc alloc DEPS_panic_unwind := libc alloc unwind DEPS_unwind := libc RUSTFLAGS_compiler_builtins := -lstatic=compiler-rt +RUSTFLAGS_panic_abort := -C panic=abort -# FIXME(stage0): change this to just `RUSTFLAGS_panic_abort := ...` -RUSTFLAGS1_panic_abort := -C panic=abort -RUSTFLAGS2_panic_abort := -C panic=abort -RUSTFLAGS3_panic_abort := -C panic=abort - -DEPS_std := core libc rand alloc collections compiler_builtins rustc_unicode \ +DEPS_std := core libc rand alloc collections compiler_builtins std_unicode \ native:backtrace \ alloc_system panic_abort panic_unwind unwind DEPS_arena := std @@ -100,7 +96,7 @@ DEPS_serialize := std log DEPS_term := std DEPS_test := std getopts term native:rust_test_helpers -DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos rustc_data_structures +DEPS_syntax := std term serialize log arena libc rustc_bitflags std_unicode rustc_errors syntax_pos 
rustc_data_structures DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro DEPS_syntax_pos := serialize DEPS_proc_macro_tokens := syntax syntax_pos log @@ -140,7 +136,7 @@ DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \ DEPS_rustc_incremental := rustc syntax_pos serialize rustc_data_structures DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \ - rustc_const_eval rustc_errors + rustc_const_eval rustc_errors rustc_data_structures DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts test \ rustc_lint rustc_const_eval syntax_pos rustc_data_structures @@ -162,7 +158,7 @@ ONLY_RLIB_libc := 1 ONLY_RLIB_alloc := 1 ONLY_RLIB_rand := 1 ONLY_RLIB_collections := 1 -ONLY_RLIB_rustc_unicode := 1 +ONLY_RLIB_std_unicode := 1 ONLY_RLIB_rustc_bitflags := 1 ONLY_RLIB_alloc_system := 1 ONLY_RLIB_alloc_jemalloc := 1 @@ -173,7 +169,7 @@ ONLY_RLIB_unwind := 1 TARGET_SPECIFIC_alloc_jemalloc := 1 # Documented-by-default crates -DOC_CRATES := std alloc collections core libc rustc_unicode +DOC_CRATES := std alloc collections core libc std_unicode ifeq ($(CFG_DISABLE_JEMALLOC),) RUSTFLAGS_rustc_back := --cfg 'feature="jemalloc"' diff --git a/mk/main.mk b/mk/main.mk index 9936c5b59be..49fdfc4118d 100644 --- a/mk/main.mk +++ b/mk/main.mk @@ -372,15 +372,12 @@ CFG_INFO := $(info cfg: disabling unstable features (CFG_DISABLE_UNSTABLE_FEATUR # Turn on feature-staging export CFG_DISABLE_UNSTABLE_FEATURES # Subvert unstable feature lints to do the self-build -export RUSTC_BOOTSTRAP=1 endif ifdef CFG_MUSL_ROOT export CFG_MUSL_ROOT endif -# FIXME: Transitionary measure to bootstrap using the old bootstrap logic. -# Remove this once the bootstrap compiler uses the new login in Issue #36548. 
-export RUSTC_BOOTSTRAP_KEY=62b3e239 +export RUSTC_BOOTSTRAP := 1 ###################################################################### # Per-stage targets and runner @@ -443,10 +440,7 @@ endif TSREQ$(1)_T_$(2)_H_$(3) = \ $$(HSREQ$(1)_H_$(3)) \ $$(foreach obj,$$(REQUIRED_OBJECTS_$(2)),\ - $$(TLIB$(1)_T_$(2)_H_$(3))/$$(obj)) \ - $$(TLIB0_T_$(2)_H_$(3))/$$(call CFG_STATIC_LIB_NAME_$(2),compiler-rt) -# ^ This copies `libcompiler-rt.a` to the stage0 sysroot -# ^ TODO(stage0) update this to not copy `libcompiler-rt.a` to stage0 + $$(TLIB$(1)_T_$(2)_H_$(3))/$$(obj)) # Prerequisites for a working stageN compiler and libraries, for a specific # target diff --git a/mk/tests.mk b/mk/tests.mk index 35ee7697a7a..345fc1679b0 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -15,7 +15,7 @@ # The names of crates that must be tested -# libcore/librustc_unicode tests are in a separate crate +# libcore/libstd_unicode tests are in a separate crate DEPS_coretest := $(eval $(call RUST_CRATE,coretest)) diff --git a/src/Cargo.lock b/src/Cargo.lock index ab1c1c453dd..4c6aeeddd38 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -45,7 +45,6 @@ dependencies = [ "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", - "md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", @@ -73,7 +72,7 @@ version = "0.0.0" dependencies = [ "alloc 0.0.0", "core 0.0.0", - "rustc_unicode 0.0.0", + "std_unicode 0.0.0", ] [[package]] @@ -172,11 +171,6 @@ name = "log" version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "md5" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - [[package]] name = "num_cpus" version = "0.2.13" @@ -409,6 +403,7 @@ version = "0.0.0" dependencies = [ "build_helper 0.1.0", "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc_bitflags 0.0.0", ] [[package]] @@ -550,13 +545,6 @@ dependencies = [ "syntax_pos 0.0.0", ] -[[package]] -name = "rustc_unicode" -version = "0.0.0" -dependencies = [ - "core 0.0.0", -] - [[package]] name = "rustdoc" version = "0.0.0" @@ -604,7 +592,7 @@ dependencies = [ "panic_abort 0.0.0", "panic_unwind 0.0.0", "rand 0.0.0", - "rustc_unicode 0.0.0", + "std_unicode 0.0.0", "unwind 0.0.0", ] @@ -616,6 +604,13 @@ dependencies = [ "std 0.0.0", ] +[[package]] +name = "std_unicode" +version = "0.0.0" +dependencies = [ + "core 0.0.0", +] + [[package]] name = "syntax" version = "0.0.0" @@ -686,7 +681,6 @@ dependencies = [ "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" "checksum libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "044d1360593a78f5c8e5e710beccdc24ab71d1f01bc19a29bcacdba22e8475d8" "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054" -"checksum md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a5539a8dee9b4ae308c9c406a379838b435a8f2c84cf9fedc6d5a576be9888db" "checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = 
"cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3" "checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b" "checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796" diff --git a/src/Cargo.toml b/src/Cargo.toml index dbd2f7743dc..8fb5c70c41b 100644 --- a/src/Cargo.toml +++ b/src/Cargo.toml @@ -11,3 +11,20 @@ members = [ "tools/rustbook", "tools/tidy", ] + +# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit +# MSVC when running the compile-fail test suite when a should-fail test panics. +# But hey if this is removed and it gets past the bots, sounds good to me. +[profile.release] +opt-level = 2 +[profile.bench] +opt-level = 2 + +# These options are controlled from our rustc wrapper script, so turn them off +# here and have them controlled elsewhere. +[profile.dev] +debug = false +debug-assertions = false +[profile.test] +debug = false +debug-assertions = false diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 4c9b578c134..35f8fb43f7b 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -29,4 +29,3 @@ getopts = "0.2" rustc-serialize = "0.3" gcc = "0.3.38" libc = "0.2" -md5 = "0.1" diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index f73f41ffae2..d0b501e4d89 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -32,7 +32,7 @@ The script accepts commands, flags, and filters to determine what to do: # build the whole compiler ./x.py build - # build the stage1 compier + # build the stage1 compiler ./x.py build --stage 1 # build stage0 libstd @@ -66,17 +66,6 @@ The script accepts commands, flags, and filters to determine what to do: * `doc` - a command for building documentation. Like above can take arguments for what to document. -If you're more used to `./configure` and `make`, however, then you can also -configure the build system to use rustbuild instead of the old makefiles: - -``` -./configure --enable-rustbuild -make -``` - -Afterwards the `Makefile` which is generated will have a few commands like -`make check`, `make tidy`, etc. - ## Configuring rustbuild There are currently two primary methods for configuring the rustbuild build @@ -90,6 +79,13 @@ be found at `src/bootstrap/config.toml.example`, and the configuration file can also be passed as `--config path/to/config.toml` if the build system is being invoked manually (via the python script). +Finally, rustbuild makes use of the [gcc-rs crate] which has [its own +method][env-vars] of configuring C compilers and C flags via environment +variables. + +[gcc-rs crate]: https://github.com/alexcrichton/gcc-rs +[env-vars]: https://github.com/alexcrichton/gcc-rs#external-configuration-via-environment-variables + ## Build stages The rustbuild build system goes through a few phases to actually build the @@ -273,16 +269,17 @@ After that, each module in rustbuild should have enough documentation to keep you up and running. Some general areas that you may be interested in modifying are: -* Adding a new build tool? Take a look at `build/step.rs` for examples of other - tools, as well as `build/mod.rs`. +* Adding a new build tool? Take a look at `bootstrap/step.rs` for examples of + other tools. * Adding a new compiler crate? Look no further! 
Adding crates can be done by adding a new directory with `Cargo.toml` followed by configuring all `Cargo.toml` files accordingly. * Adding a new dependency from crates.io? We're still working on that, so hold off on that for now. -* Adding a new configuration option? Take a look at `build/config.rs` or perhaps - `build/flags.rs` and then modify the build elsewhere to read that option. -* Adding a sanity check? Take a look at `build/sanity.rs`. +* Adding a new configuration option? Take a look at `bootstrap/config.rs` or + perhaps `bootstrap/flags.rs` and then modify the build elsewhere to read that + option. +* Adding a sanity check? Take a look at `bootstrap/sanity.rs`. If you have any questions feel free to reach out on `#rust-internals` on IRC or open an issue in the bug tracker! diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index 879eca60cc7..2f674a311fe 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -125,6 +125,11 @@ fn main() { cmd.arg("-C").arg(format!("codegen-units={}", s)); } + // Emit save-analysis info. + if env::var("RUSTC_SAVE_ANALYSIS") == Ok("api".to_string()) { + cmd.arg("-Zsave-analysis-api"); + } + // Dealing with rpath here is a little special, so let's go into some // detail. First off, `-rpath` is a linker option on Unix platforms // which adds to the runtime dynamic loader path when looking for diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index a3fabbb3e80..0dda7f12007 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -30,32 +30,37 @@ def get(url, path, verbose=False): sha_path = sha_file.name try: - download(sha_path, sha_url, verbose) + download(sha_path, sha_url, False, verbose) if os.path.exists(path): if verify(path, sha_path, False): - print("using already-download file " + path) + if verbose: + print("using already-download file " + path) return else: - print("ignoring already-download file " + path + " due to failed verification") + if verbose: + print("ignoring already-download file " + path + " due to failed verification") os.unlink(path) - download(temp_path, url, verbose) - if not verify(temp_path, sha_path, True): + download(temp_path, url, True, verbose) + if not verify(temp_path, sha_path, verbose): raise RuntimeError("failed verification") - print("moving {} to {}".format(temp_path, path)) + if verbose: + print("moving {} to {}".format(temp_path, path)) shutil.move(temp_path, path) finally: - delete_if_present(sha_path) - delete_if_present(temp_path) + delete_if_present(sha_path, verbose) + delete_if_present(temp_path, verbose) -def delete_if_present(path): +def delete_if_present(path, verbose): if os.path.isfile(path): - print("removing " + path) + if verbose: + print("removing " + path) os.unlink(path) -def download(path, url, verbose): - print("downloading {} to {}".format(url, path)) +def download(path, url, probably_big, verbose): + if probably_big or verbose: + print("downloading {}".format(url)) # see http://serverfault.com/questions/301128/how-to-download if sys.platform == 'win32': run(["PowerShell.exe", "/nologo", "-Command", @@ -63,17 +68,22 @@ def download(path, url, verbose): ".DownloadFile('{}', '{}')".format(url, path)], verbose=verbose) else: - run(["curl", "-o", path, url], verbose=verbose) + if probably_big or verbose: + option = "-#" + else: + option = "-s" + run(["curl", option, "-Sf", "-o", path, url], verbose=verbose) def verify(path, sha_path, verbose): - print("verifying " + path) + if verbose: + print("verifying " + path) with 
open(path, "rb") as f: found = hashlib.sha256(f.read()).hexdigest() with open(sha_path, "r") as f: expected, _ = f.readline().split() verified = found == expected - if not verified and verbose: + if not verified: print("invalid checksum:\n" " found: {}\n" " expected: {}".format(found, expected)) @@ -144,6 +154,7 @@ class RustBuild(object): if self.rustc().startswith(self.bin_root()) and \ (not os.path.exists(self.rustc()) or self.rustc_out_of_date()): + self.print_what_it_means_to_bootstrap() if os.path.exists(self.bin_root()): shutil.rmtree(self.bin_root()) channel = self.stage0_rustc_channel() @@ -167,6 +178,7 @@ class RustBuild(object): if self.cargo().startswith(self.bin_root()) and \ (not os.path.exists(self.cargo()) or self.cargo_out_of_date()): + self.print_what_it_means_to_bootstrap() channel = self.stage0_cargo_channel() filename = "cargo-{}-{}.tar.gz".format(channel, self.build) url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date() @@ -251,7 +263,27 @@ class RustBuild(object): else: return '' + def print_what_it_means_to_bootstrap(self): + if hasattr(self, 'printed'): + return + self.printed = True + if os.path.exists(self.bootstrap_binary()): + return + if not '--help' in sys.argv or len(sys.argv) == 1: + return + + print('info: the build system for Rust is written in Rust, so this') + print(' script is now going to download a stage0 rust compiler') + print(' and then compile the build system itself') + print('') + print('info: in the meantime you can read more about rustbuild at') + print(' src/bootstrap/README.md before the download finishes') + + def bootstrap_binary(self): + return os.path.join(self.build_dir, "bootstrap/debug/bootstrap") + def build_bootstrap(self): + self.print_what_it_means_to_bootstrap() build_dir = os.path.join(self.build_dir, "bootstrap") if self.clean and os.path.exists(build_dir): shutil.rmtree(build_dir) @@ -408,22 +440,31 @@ def main(): rb.use_vendored_sources = '\nvendor = true' in rb.config_toml or \ 'CFG_ENABLE_VENDOR' in rb.config_mk + if 'SUDO_USER' in os.environ: + if os.environ['USER'] != os.environ['SUDO_USER']: + rb.use_vendored_sources = True + print('info: looks like you are running this command under `sudo`') + print(' and so in order to preserve your $HOME this will now') + print(' use vendored sources by default. 
Note that if this') + print(' does not work you should run a normal build first') + print(' before running a command like `sudo make intall`') + if rb.use_vendored_sources: if not os.path.exists('.cargo'): os.makedirs('.cargo') - f = open('.cargo/config','w') - f.write(""" - [source.crates-io] - replace-with = 'vendored-sources' - registry = 'https://example.com' + with open('.cargo/config','w') as f: + f.write(""" + [source.crates-io] + replace-with = 'vendored-sources' + registry = 'https://example.com' - [source.vendored-sources] - directory = '{}/src/vendor' - """.format(rb.rust_root)) - f.close() + [source.vendored-sources] + directory = '{}/src/vendor' + """.format(rb.rust_root)) else: if os.path.exists('.cargo'): shutil.rmtree('.cargo') + data = stage0_data(rb.rust_root) rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1) rb._cargo_channel, rb._cargo_date = data['cargo'].split('-', 1) @@ -438,7 +479,7 @@ def main(): sys.stdout.flush() # Run the bootstrap - args = [os.path.join(rb.build_dir, "bootstrap/debug/bootstrap")] + args = [rb.bootstrap_binary()] args.extend(sys.argv[1:]) env = os.environ.copy() env["BUILD"] = rb.build diff --git a/src/bootstrap/cc.rs b/src/bootstrap/cc.rs index e2bde4a6586..aa70e24d952 100644 --- a/src/bootstrap/cc.rs +++ b/src/bootstrap/cc.rs @@ -51,7 +51,7 @@ pub fn find(build: &mut Build) { if let Some(cc) = config.and_then(|c| c.cc.as_ref()) { cfg.compiler(cc); } else { - set_compiler(&mut cfg, "gcc", target, config); + set_compiler(&mut cfg, "gcc", target, config, build); } let compiler = cfg.get_compiler(); @@ -72,7 +72,7 @@ pub fn find(build: &mut Build) { if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) { cfg.compiler(cxx); } else { - set_compiler(&mut cfg, "g++", host, config); + set_compiler(&mut cfg, "g++", host, config, build); } let compiler = cfg.get_compiler(); build.verbose(&format!("CXX_{} = {:?}", host, compiler.path())); @@ -83,7 +83,8 @@ pub fn find(build: &mut Build) { fn set_compiler(cfg: &mut gcc::Config, gnu_compiler: &str, target: &str, - config: Option<&Target>) { + config: Option<&Target>, + build: &Build) { match target { // When compiling for android we may have the NDK configured in the // config.toml in which case we look there. Otherwise the default @@ -119,6 +120,22 @@ fn set_compiler(cfg: &mut gcc::Config, } } + "mips-unknown-linux-musl" => { + cfg.compiler("mips-linux-musl-gcc"); + } + "mipsel-unknown-linux-musl" => { + cfg.compiler("mipsel-linux-musl-gcc"); + } + + t if t.contains("musl") => { + if let Some(root) = build.musl_root(target) { + let guess = root.join("bin/musl-gcc"); + if guess.exists() { + cfg.compiler(guess); + } + } + } + _ => {} } } diff --git a/src/bootstrap/channel.rs b/src/bootstrap/channel.rs index 879c383404a..b2341f59787 100644 --- a/src/bootstrap/channel.rs +++ b/src/bootstrap/channel.rs @@ -20,7 +20,6 @@ use std::io::prelude::*; use std::process::Command; use build_helper::output; -use md5; use Build; @@ -91,20 +90,4 @@ pub fn collect(build: &mut Build) { build.ver_hash = Some(ver_hash); build.short_ver_hash = Some(short_ver_hash); } - - // Calculate this compiler's bootstrap key, which is currently defined as - // the first 8 characters of the md5 of the release string. - let key = md5::compute(build.release.as_bytes()); - build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}", - key[0], key[1], key[2], key[3]); - - // Slurp up the stage0 bootstrap key as we're bootstrapping from an - // otherwise stable compiler. 
- let mut s = String::new(); - t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s)); - if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) { - if let Some(key) = line.split(": ").nth(1) { - build.bootstrap_key_stage0 = key.to_string(); - } - } } diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index ac6be2a870b..e0798860275 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -8,13 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Implementation of the various `check-*` targets of the build system. +//! Implementation of the test-related targets of the build system. //! //! This file implements the various regression test suites that we execute on //! our CI. use std::collections::HashSet; use std::env; +use std::fmt; use std::fs; use std::path::{PathBuf, Path}; use std::process::Command; @@ -22,10 +23,39 @@ use std::process::Command; use build_helper::output; use {Build, Compiler, Mode}; +use dist; use util::{self, dylib_path, dylib_path_var}; const ADB_TEST_DIR: &'static str = "/data/tmp"; +/// The two modes of the test runner; tests or benchmarks. +#[derive(Copy, Clone)] +pub enum TestKind { + /// Run `cargo test` + Test, + /// Run `cargo bench` + Bench, +} + +impl TestKind { + // Return the cargo subcommand for this test kind + fn subcommand(self) -> &'static str { + match self { + TestKind::Test => "test", + TestKind::Bench => "bench", + } + } +} + +impl fmt::Display for TestKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match *self { + TestKind::Test => "Testing", + TestKind::Bench => "Benchmarking", + }) + } +} + /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. /// /// This tool in `src/tools` will verify the validity of all our links in the @@ -33,6 +63,8 @@ const ADB_TEST_DIR: &'static str = "/data/tmp"; pub fn linkcheck(build: &Build, stage: u32, host: &str) { println!("Linkcheck stage{} ({})", stage, host); let compiler = Compiler::new(stage, host); + + let _time = util::timeit(); build.run(build.tool_cmd(&compiler, "linkchecker") .arg(build.out.join(host).join("doc"))); } @@ -58,6 +90,7 @@ pub fn cargotest(build: &Build, stage: u32, host: &str) { let out_dir = build.out.join("ct"); t!(fs::create_dir_all(&out_dir)); + let _time = util::timeit(); build.run(build.tool_cmd(compiler, "cargotest") .env("PATH", newpath) .arg(&build.cargo) @@ -90,7 +123,8 @@ pub fn compiletest(build: &Build, target: &str, mode: &str, suite: &str) { - println!("Check compiletest {} ({} -> {})", suite, compiler.host, target); + println!("Check compiletest suite={} mode={} ({} -> {})", + suite, mode, compiler.host, target); let mut cmd = build.tool_cmd(compiler, "compiletest"); // compiletest currently has... a lot of arguments, so let's just pass all @@ -184,6 +218,9 @@ pub fn compiletest(build: &Build, // Running a C compiler on MSVC requires a few env vars to be set, to be // sure to set them here. + // + // Note that if we encounter `PATH` we make sure to append to our own `PATH` + // rather than stomp over it. 
if target.contains("msvc") { for &(ref k, ref v) in build.cc[target].0.env() { if k != "PATH" { @@ -191,7 +228,8 @@ pub fn compiletest(build: &Build, } } } - build.add_bootstrap_key(&mut cmd); + cmd.env("RUSTC_BOOTSTRAP", "1"); + build.add_rust_test_threads(&mut cmd); cmd.arg("--adb-path").arg("adb"); cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); @@ -203,6 +241,7 @@ pub fn compiletest(build: &Build, cmd.arg("--android-cross-path").arg(""); } + let _time = util::timeit(); build.run(&mut cmd); } @@ -215,6 +254,7 @@ pub fn docs(build: &Build, compiler: &Compiler) { // Do a breadth-first traversal of the `src/doc` directory and just run // tests for all files that end in `*.md` let mut stack = vec![build.src.join("src/doc")]; + let _time = util::timeit(); while let Some(p) = stack.pop() { if p.is_dir() { @@ -243,6 +283,8 @@ pub fn error_index(build: &Build, compiler: &Compiler) { let dir = testdir(build, compiler.host); t!(fs::create_dir_all(&dir)); let output = dir.join("error-index.md"); + + let _time = util::timeit(); build.run(build.tool_cmd(compiler, "error_index_generator") .arg("markdown") .arg(&output) @@ -254,6 +296,7 @@ pub fn error_index(build: &Build, compiler: &Compiler) { fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) { let mut cmd = Command::new(build.rustdoc(compiler)); build.add_rustc_lib_path(compiler, &mut cmd); + build.add_rust_test_threads(&mut cmd); cmd.arg("--test"); cmd.arg(markdown); @@ -278,6 +321,7 @@ pub fn krate(build: &Build, compiler: &Compiler, target: &str, mode: Mode, + test_kind: TestKind, krate: Option<&str>) { let (name, path, features, root) = match mode { Mode::Libstd => { @@ -291,7 +335,7 @@ pub fn krate(build: &Build, } _ => panic!("can only test libraries"), }; - println!("Testing {} stage{} ({} -> {})", name, compiler.stage, + println!("{} {} stage{} ({} -> {})", test_kind, name, compiler.stage, compiler.host, target); // Build up the base `cargo test` command. @@ -299,7 +343,7 @@ pub fn krate(build: &Build, // Pass in some standard flags then iterate over the graph we've discovered // in `cargo metadata` with the maps above and figure out what `-p` // arguments need to get passed. 
- let mut cargo = build.cargo(compiler, mode, target, "test"); + let mut cargo = build.cargo(compiler, mode, target, test_kind.subcommand()); cargo.arg("--manifest-path") .arg(build.src.join(path).join("Cargo.toml")) .arg("--features").arg(features); @@ -336,16 +380,25 @@ pub fn krate(build: &Build, dylib_path.insert(0, build.sysroot_libdir(compiler, target)); cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + if target.contains("android") { + cargo.arg("--no-run"); + } else if target.contains("emscripten") { + cargo.arg("--no-run"); + } + + cargo.arg("--"); + if build.config.quiet_tests { - cargo.arg("--"); cargo.arg("--quiet"); } + let _time = util::timeit(); + if target.contains("android") { - build.run(cargo.arg("--no-run")); + build.run(&mut cargo); krate_android(build, compiler, target, mode); } else if target.contains("emscripten") { - build.run(cargo.arg("--no-run")); + build.run(&mut cargo); krate_emscripten(build, compiler, target, mode); } else { cargo.args(&build.flags.cmd.test_args()); @@ -372,14 +425,17 @@ fn krate_android(build: &Build, target, compiler.host, test_file_name); + let quiet = if build.config.quiet_tests { "--quiet" } else { "" }; let program = format!("(cd {dir}; \ LD_LIBRARY_PATH=./{target} ./{test} \ --logfile {log} \ + {quiet} \ {args})", dir = ADB_TEST_DIR, target = target, test = test_file_name, log = log, + quiet = quiet, args = build.flags.cmd.test_args().join(" ")); let output = output(Command::new("adb").arg("shell").arg(&program)); @@ -408,18 +464,12 @@ fn krate_emscripten(build: &Build, let test_file_name = test.to_string_lossy().into_owned(); println!("running {}", test_file_name); let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured"); - let status = Command::new(nodejs) - .arg(&test_file_name) - .stderr(::std::process::Stdio::inherit()) - .status(); - match status { - Ok(status) => { - if !status.success() { - panic!("some tests failed"); - } - } - Err(e) => panic!(format!("failed to execute command: {}", e)), - }; + let mut cmd = Command::new(nodejs); + cmd.arg(&test_file_name); + if build.config.quiet_tests { + cmd.arg("--quiet"); + } + build.run(&mut cmd); } } @@ -467,3 +517,32 @@ pub fn android_copy_libs(build: &Build, } } } + +/// Run "distcheck", a 'make check' from a tarball +pub fn distcheck(build: &Build) { + if build.config.build != "x86_64-unknown-linux-gnu" { + return + } + if !build.config.host.iter().any(|s| s == "x86_64-unknown-linux-gnu") { + return + } + if !build.config.target.iter().any(|s| s == "x86_64-unknown-linux-gnu") { + return + } + + let dir = build.out.join("tmp").join("distcheck"); + let _ = fs::remove_dir_all(&dir); + t!(fs::create_dir_all(&dir)); + + let mut cmd = Command::new("tar"); + cmd.arg("-xzf") + .arg(dist::rust_src_location(build)) + .arg("--strip-components=1") + .current_dir(&dir); + build.run(&mut cmd); + build.run(Command::new("./configure") + .current_dir(&dir)); + build.run(Command::new("make") + .arg("check") + .current_dir(&dir)); +} diff --git a/src/bootstrap/clean.rs b/src/bootstrap/clean.rs index 75bcbfee6ee..e7655458aed 100644 --- a/src/bootstrap/clean.rs +++ b/src/bootstrap/clean.rs @@ -46,6 +46,9 @@ fn rm_rf(build: &Build, path: &Path) { if !path.exists() { return } + if path.is_file() { + return do_op(path, "remove file", |p| fs::remove_file(p)); + } for file in t!(fs::read_dir(path)) { let file = t!(file).path(); diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 236989dbcfe..b268686ca6c 100644 --- a/src/bootstrap/compile.rs +++ 
b/src/bootstrap/compile.rs @@ -120,8 +120,8 @@ fn build_startup_objects(build: &Build, target: &str, into: &Path) { for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) { let file = t!(file); let mut cmd = Command::new(&compiler_path); - build.add_bootstrap_key(&mut cmd); - build.run(cmd.arg("--target").arg(target) + build.run(cmd.env("RUSTC_BOOTSTRAP", "1") + .arg("--target").arg(target) .arg("--emit=obj") .arg("--out-dir").arg(into) .arg(file.path())); diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index d603455122e..1d3445a9eac 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -23,7 +23,7 @@ use std::io::Write; use std::path::{PathBuf, Path}; use std::process::Command; -use {Build, Compiler}; +use {Build, Compiler, Mode}; use util::{cp_r, libdir, is_dylib, cp_filtered, copy}; pub fn package_vers(build: &Build) -> &str { @@ -284,6 +284,55 @@ pub fn std(build: &Build, compiler: &Compiler, target: &str) { t!(fs::remove_dir_all(&image)); } +pub fn rust_src_location(build: &Build) -> PathBuf { + let plain_name = format!("rustc-{}-src", package_vers(build)); + distdir(build).join(&format!("{}.tar.gz", plain_name)) +} + +/// Creates a tarball of save-analysis metadata, if available. +pub fn analysis(build: &Build, compiler: &Compiler, target: &str) { + println!("Dist analysis"); + + if build.config.channel != "nightly" { + println!("Skipping dist-analysis - not on nightly channel"); + return; + } + if compiler.stage != 2 { + return + } + + let name = format!("rust-analysis-{}", package_vers(build)); + let image = tmpdir(build).join(format!("{}-{}-image", name, target)); + + let src = build.stage_out(compiler, Mode::Libstd).join(target).join("release").join("deps"); + + let image_src = src.join("save-analysis"); + let dst = image.join("lib/rustlib").join(target).join("analysis"); + t!(fs::create_dir_all(&dst)); + cp_r(&image_src, &dst); + + let mut cmd = Command::new("sh"); + cmd.arg(sanitize_sh(&build.src.join("src/rust-installer/gen-installer.sh"))) + .arg("--product-name=Rust") + .arg("--rel-manifest-dir=rustlib") + .arg("--success-message=save-analysis-saved.") + .arg(format!("--image-dir={}", sanitize_sh(&image))) + .arg(format!("--work-dir={}", sanitize_sh(&tmpdir(build)))) + .arg(format!("--output-dir={}", sanitize_sh(&distdir(build)))) + .arg(format!("--package-name={}-{}", name, target)) + .arg(format!("--component-name=rust-analysis-{}", target)) + .arg("--legacy-manifest-dirs=rustlib,cargo"); + build.run(&mut cmd); + t!(fs::remove_dir_all(&image)); + + // Create plain source tarball + let mut cmd = Command::new("tar"); + cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", name)))) + .arg("analysis") + .current_dir(&src); + build.run(&mut cmd); +} + /// Creates the `rust-src` installer component and the plain source tarball pub fn rust_src(build: &Build) { println!("Dist src"); @@ -374,7 +423,7 @@ pub fn rust_src(build: &Build) { // Create plain source tarball let mut cmd = Command::new("tar"); - cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", plain_name)))) + cmd.arg("-czf").arg(sanitize_sh(&rust_src_location(build))) .arg(&plain_name) .current_dir(&dst); build.run(&mut cmd); diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index d7516954f12..7a2d56fc5d3 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -49,6 +49,10 @@ pub enum Subcommand { paths: Vec, test_args: Vec, }, + Bench { + paths: Vec, + test_args: Vec, + }, Clean, Dist { install: bool, @@ -141,6 +145,7 @@ 
Arguments: command == "dist" || command == "doc" || command == "test" || + command == "bench" || command == "clean" { println!("Available invocations:"); if args.iter().any(|a| a == "-v") { @@ -163,6 +168,7 @@ println!("\ Subcommands: build Compile either the compiler or libraries test Build and run some test suites + bench Build and run some benchmarks doc Build documentation clean Clean out build directories dist Build and/or install distribution artifacts @@ -210,6 +216,14 @@ To learn more about a subcommand, run `./x.py -h` test_args: m.opt_strs("test-args"), } } + "bench" => { + opts.optmulti("", "test-args", "extra arguments", "ARGS"); + m = parse(&opts); + Subcommand::Bench { + paths: remaining_as_path(&m), + test_args: m.opt_strs("test-args"), + } + } "clean" => { m = parse(&opts); if m.free.len() > 0 { @@ -225,6 +239,7 @@ To learn more about a subcommand, run `./x.py -h` install: m.opt_present("install"), } } + "--help" => usage(0, &opts), cmd => { println!("unknown command: {}", cmd); usage(1, &opts); @@ -259,7 +274,8 @@ To learn more about a subcommand, run `./x.py -h` impl Subcommand { pub fn test_args(&self) -> Vec<&str> { match *self { - Subcommand::Test { ref test_args, .. } => { + Subcommand::Test { ref test_args, .. } | + Subcommand::Bench { ref test_args, .. } => { test_args.iter().flat_map(|s| s.split_whitespace()).collect() } _ => Vec::new(), diff --git a/src/bootstrap/job.rs b/src/bootstrap/job.rs index b4d7aff97da..c3859275e6f 100644 --- a/src/bootstrap/job.rs +++ b/src/bootstrap/job.rs @@ -51,6 +51,7 @@ type LPVOID = *mut u8; type JOBOBJECTINFOCLASS = i32; type SIZE_T = usize; type LARGE_INTEGER = i64; +type UINT = u32; type ULONG_PTR = usize; type ULONGLONG = u64; @@ -59,6 +60,8 @@ const DUPLICATE_SAME_ACCESS: DWORD = 0x2; const PROCESS_DUP_HANDLE: DWORD = 0x40; const JobObjectExtendedLimitInformation: JOBOBJECTINFOCLASS = 9; const JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE: DWORD = 0x2000; +const SEM_FAILCRITICALERRORS: UINT = 0x0001; +const SEM_NOGPFAULTERRORBOX: UINT = 0x0002; extern "system" { fn CreateJobObjectW(lpJobAttributes: *mut u8, lpName: *const u8) -> HANDLE; @@ -79,6 +82,7 @@ extern "system" { JobObjectInformationClass: JOBOBJECTINFOCLASS, lpJobObjectInformation: LPVOID, cbJobObjectInformationLength: DWORD) -> BOOL; + fn SetErrorMode(mode: UINT) -> UINT; } #[repr(C)] @@ -115,6 +119,13 @@ struct JOBOBJECT_BASIC_LIMIT_INFORMATION { } pub unsafe fn setup() { + // Tell Windows to not show any UI on errors (such as not finding a required dll + // during startup or terminating abnormally). This is important for running tests, + // since some of them use abnormal termination by design. + // This mode is inherited by all child processes. + let mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags + SetErrorMode(mode | SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX); + // Create a new job object for us to use let job = CreateJobObjectW(0 as *mut _, 0 as *const _); assert!(job != 0 as *mut _, "{}", io::Error::last_os_error()); diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 828e82d3832..cd80c4298dc 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -13,22 +13,69 @@ //! This module, and its descendants, are the implementation of the Rust build //! system. Most of this build system is backed by Cargo but the outer layer //! here serves as the ability to orchestrate calling Cargo, sequencing Cargo -//! builds, building artifacts like LLVM, etc. +//! builds, building artifacts like LLVM, etc. The goals of rustbuild are: //! -//! 
More documentation can be found in each respective module below. +//! * To be an easily understandable, easily extensible, and maintainable build +//! system. +//! * To leverage standard tools in the Rust ecosystem to build the compiler, namely +//! crates.io and Cargo. +//! * To provide a standard interface for building across all platforms, including MSVC +//! +//! ## Architecture +//! +//! Although this build system defers most of the complicated logic to Cargo +//! itself, it still needs to maintain a list of targets and dependencies which +//! it can itself perform. Rustbuild is made up of a list of rules with +//! dependencies amongst them (created in the `step` module) and then knows how +//! to execute each in sequence. Each time rustbuild is invoked, it will simply +//! iterate through this list of steps and execute each serially in turn. For +//! each step, rustbuild relies on the step itself being internally incremental and +//! parallel. Note, though, that the `-j` parameter to rustbuild gets forwarded +//! to appropriate test harnesses and such. +//! +//! Most of the "meaty" steps that matter are backed by Cargo, which does indeed +//! have its own parallelism and incremental management. Later steps, like +//! tests, aren't incremental and simply run the entire suite currently. +//! +//! When you execute `x.py build`, the steps which are executed are: +//! +//! * First, the python script is run. This will automatically download the +//! stage0 rustc and cargo according to `src/stage0.txt`, or using the cached +//! versions if they're available. These are then used to compile rustbuild +//! itself (using Cargo). Finally, control is then transferred to rustbuild. +//! +//! * Rustbuild takes over, performs sanity checks, probes the environment, +//! reads configuration, builds up a list of steps, and then starts executing +//! them. +//! +//! * The stage0 libstd is compiled +//! * The stage0 libtest is compiled +//! * The stage0 librustc is compiled +//! * The stage1 compiler is assembled +//! * The stage1 libstd, libtest, librustc are compiled +//! * The stage2 compiler is assembled +//! * The stage2 libstd, libtest, librustc are compiled +//! +//! Each step is driven by a separate Cargo project and rustbuild orchestrates +//! copying files between steps and otherwise preparing for Cargo to run. +//! +//! ## Further information +//! +//! More documentation can be found in each respective module below, and you can +//! also check out the `src/bootstrap/README.md` file for more information. extern crate build_helper; extern crate cmake; extern crate filetime; extern crate gcc; extern crate getopts; -extern crate md5; extern crate num_cpus; extern crate rustc_serialize; extern crate toml; use std::collections::HashMap; use std::env; +use std::ffi::OsString; use std::fs::{self, File}; use std::path::{Component, PathBuf, Path}; use std::process::Command; @@ -120,8 +167,6 @@ pub struct Build { version: String, package_vers: String, local_rebuild: bool, - bootstrap_key: String, - bootstrap_key_stage0: String, // Probed tools at runtime lldb_version: Option<String>, @@ -131,6 +176,7 @@ pub struct Build { cc: HashMap<String, (gcc::Tool, Option<PathBuf>)>, cxx: HashMap<String, gcc::Tool>, crates: HashMap<String, Crate>, + is_sudo: bool, } #[derive(Debug)] struct Crate { doc_step: String, build_step: String, test_step: String, + bench_step: String, } /// The various "modes" of invoking Cargo. 
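The lib.rs module docs above describe rustbuild as planning a flat list of steps and then executing them serially, leaning on Cargo for parallelism and incrementality inside each step. As a rough sketch of that control flow (the `Step` struct, `plan`, and `execute` below are simplified stand-ins invented for illustration, not the actual rustbuild types):

```rust
// A minimal sketch of the "plan, then execute serially" model described in
// the module docs above. The real rustbuild `Step` also records host and
// target triples, and each step shells out to Cargo instead of printing.
struct Step {
    name: &'static str,
    stage: u32,
}

// Stand-in for the planning phase; rustbuild derives this list from the CLI
// subcommand and the rule graph defined in `step.rs`.
fn plan() -> Vec<Step> {
    vec![
        Step { name: "libstd", stage: 0 },
        Step { name: "libtest", stage: 0 },
        Step { name: "librustc", stage: 0 },
        Step { name: "assemble-rustc", stage: 1 },
        Step { name: "libstd", stage: 1 },
    ]
}

fn execute(step: &Step) {
    // Each real step is internally incremental and parallel (Cargo does the
    // heavy lifting); the outer loop itself stays strictly serial.
    println!("running {} at stage {}", step.name, step.stage);
}

fn main() {
    for step in plan() {
        execute(&step);
    }
}
```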
@@ -189,6 +236,16 @@ impl Build { }; let local_rebuild = config.local_rebuild; + let is_sudo = match env::var_os("SUDO_USER") { + Some(sudo_user) => { + match env::var_os("USER") { + Some(user) => user != sudo_user, + None => false, + } + } + None => false, + }; + Build { flags: flags, config: config, @@ -204,14 +261,13 @@ impl Build { ver_date: None, version: String::new(), local_rebuild: local_rebuild, - bootstrap_key: String::new(), - bootstrap_key_stage0: String::new(), package_vers: String::new(), cc: HashMap::new(), cxx: HashMap::new(), crates: HashMap::new(), lldb_version: None, lldb_python_dir: None, + is_sudo: is_sudo, } } @@ -418,7 +474,7 @@ impl Build { // how the actual compiler itself is called. // // These variables are primarily all read by - // src/bootstrap/{rustc,rustdoc.rs} + // src/bootstrap/bin/{rustc.rs,rustdoc.rs} cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc")) .env("RUSTC_REAL", self.compiler_path(compiler)) .env("RUSTC_STAGE", stage.to_string()) @@ -437,7 +493,9 @@ impl Build { .env("RUSTDOC_REAL", self.rustdoc(compiler)) .env("RUSTC_FLAGS", self.rustc_flags(target).join(" ")); - self.add_bootstrap_key(&mut cargo); + // Enable usage of unstable features + cargo.env("RUSTC_BOOTSTRAP", "1"); + self.add_rust_test_threads(&mut cargo); // Specify some various options for build scripts used throughout // the build. @@ -449,6 +507,10 @@ impl Build { .env(format!("CFLAGS_{}", target), self.cflags(target).join(" ")); } + if self.config.channel == "nightly" && compiler.stage == 2 { + cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string()); + } + // Environment variables *required* needed throughout the build // // FIXME: should update code to not require this env var @@ -457,10 +519,11 @@ impl Build { if self.config.verbose || self.flags.verbose { cargo.arg("-v"); } - if self.config.rust_optimize { + // FIXME: cargo bench does not accept `--release` + if self.config.rust_optimize && cmd != "bench" { cargo.arg("--release"); } - if self.config.vendor { + if self.config.vendor || self.is_sudo { cargo.arg("--frozen"); } return cargo @@ -494,12 +557,30 @@ impl Build { fn tool_cmd(&self, compiler: &Compiler, tool: &str) -> Command { let mut cmd = Command::new(self.tool(&compiler, tool)); let host = compiler.host; - let paths = vec![ + let mut paths = vec![ self.cargo_out(compiler, Mode::Libstd, host).join("deps"), self.cargo_out(compiler, Mode::Libtest, host).join("deps"), self.cargo_out(compiler, Mode::Librustc, host).join("deps"), self.cargo_out(compiler, Mode::Tool, host).join("deps"), ]; + + // On MSVC a tool may invoke a C compiler (e.g. compiletest in run-make + // mode) and that C compiler may need some extra PATH modification. Do + // so here. + if compiler.host.contains("msvc") { + let curpaths = env::var_os("PATH").unwrap_or(OsString::new()); + let curpaths = env::split_paths(&curpaths).collect::>(); + for &(ref k, ref v) in self.cc[compiler.host].0.env() { + if k != "PATH" { + continue + } + for path in env::split_paths(v) { + if !curpaths.contains(&path) { + paths.push(path); + } + } + } + } add_lib_path(paths, &mut cmd); return cmd } @@ -507,7 +588,7 @@ impl Build { /// Get the space-separated set of activated features for the standard /// library. 
fn std_features(&self) -> String { - let mut features = String::new(); + let mut features = "panic-unwind".to_string(); if self.config.debug_jemalloc { features.push_str(" debug-jemalloc"); } @@ -653,12 +734,11 @@ impl Build { add_lib_path(vec![self.rustc_libdir(compiler)], cmd); } - /// Adds the compiler's bootstrap key to the environment of `cmd`. - fn add_bootstrap_key(&self, cmd: &mut Command) { - cmd.env("RUSTC_BOOTSTRAP", "1"); - // FIXME: Transitionary measure to bootstrap using the old bootstrap logic. - // Remove this once the bootstrap compiler uses the new login in Issue #36548. - cmd.env("RUSTC_BOOTSTRAP_KEY", "62b3e239"); + /// Adds the `RUST_TEST_THREADS` env var if necessary + fn add_rust_test_threads(&self, cmd: &mut Command) { + if env::var_os("RUST_TEST_THREADS").is_none() { + cmd.env("RUST_TEST_THREADS", self.jobs().to_string()); + } } /// Returns the compiler's libdir where it stores the dynamic libraries that diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs index bf5cc6a4ad8..8befb105ff6 100644 --- a/src/bootstrap/metadata.rs +++ b/src/bootstrap/metadata.rs @@ -70,6 +70,7 @@ fn build_krate(build: &mut Build, krate: &str) { build_step: format!("build-crate-{}", package.name), doc_step: format!("doc-crate-{}", package.name), test_step: format!("test-crate-{}", package.name), + bench_step: format!("bench-crate-{}", package.name), name: package.name, deps: Vec::new(), path: path, diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index 1e73595ec99..1fa70081938 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -1,4 +1,4 @@ -# Copyright 20126 The Rust Project Developers. See the COPYRIGHT +# Copyright 2016 The Rust Project Developers. See the COPYRIGHT # file at the top-level directory of this distribution and at # http://rust-lang.org/COPYRIGHT. # @@ -23,9 +23,14 @@ all: $(Q)$(BOOTSTRAP) build $(BOOTSTRAP_ARGS) $(Q)$(BOOTSTRAP) doc $(BOOTSTRAP_ARGS) -# Don’t use $(Q) here, always show how to invoke the bootstrap script directly help: - $(BOOTSTRAP) --help + $(Q)echo 'Welcome to the rustbuild build system!' + $(Q)echo + $(Q)echo This makefile is a thin veneer over the ./x.py script located + $(Q)echo in this directory. To get the full power of the build system + $(Q)echo you can run x.py directly. + $(Q)echo + $(Q)echo To learn more run \`./x.py --help\` clean: $(Q)$(BOOTSTRAP) clean $(BOOTSTRAP_ARGS) @@ -50,16 +55,17 @@ check-cargotest: $(Q)$(BOOTSTRAP) test src/tools/cargotest $(BOOTSTRAP_ARGS) dist: $(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS) +distcheck: + $(Q)$(BOOTSTRAP) test distcheck install: -ifeq (root user, $(USER) $(patsubst %,user,$(SUDO_USER))) - $(Q)echo "'sudo make install' is not supported currently." -else $(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS) -endif tidy: $(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS) --stage 0 -check-stage2-android: - $(Q)$(BOOTSTRAP) --step check-target --target arm-linux-androideabi +check-stage2-T-arm-linux-androideabi-H-x86_64-unknown-linux-gnu: + $(Q)$(BOOTSTRAP) test --target arm-linux-androideabi +check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu: + $(Q)$(BOOTSTRAP) test --target x86_64-unknown-linux-gnu + .PHONY: dist diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index 96d1b695dd7..ffa3fe1cbf2 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -28,7 +28,7 @@ use cmake; use gcc; use Build; -use util::up_to_date; +use util::{self, up_to_date}; /// Compile LLVM for `target`. 
pub fn llvm(build: &Build, target: &str) { @@ -58,6 +58,7 @@ pub fn llvm(build: &Build, target: &str) { println!("Building LLVM for {}", target); + let _time = util::timeit(); let _ = fs::remove_dir_all(&dst.join("build")); t!(fs::create_dir_all(&dst.join("build"))); let assertions = if build.config.llvm_assertions {"ON"} else {"OFF"}; @@ -158,6 +159,17 @@ pub fn test_helpers(build: &Build, target: &str) { println!("Building test helpers"); t!(fs::create_dir_all(&dst)); let mut cfg = gcc::Config::new(); + + // We may have found various cross-compilers a little differently due to our + // extra configuration, so inform gcc of these compilers. Note, though, that + // on MSVC we still need gcc's detection of env vars (ugh). + if !target.contains("msvc") { + if let Some(ar) = build.ar(target) { + cfg.archiver(ar); + } + cfg.compiler(build.cc(target)); + } + cfg.cargo_metadata(false) .out_dir(&dst) .target(target) diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs index 47efa695217..f3fe22698bb 100644 --- a/src/bootstrap/sanity.rs +++ b/src/bootstrap/sanity.rs @@ -41,10 +41,14 @@ pub fn check(build: &mut Build) { } } let have_cmd = |cmd: &OsStr| { - for path in env::split_paths(&path).map(|p| p.join(cmd)) { - if fs::metadata(&path).is_ok() || - fs::metadata(path.with_extension("exe")).is_ok() { - return Some(path); + for path in env::split_paths(&path) { + let target = path.join(cmd); + let mut cmd_alt = cmd.to_os_string(); + cmd_alt.push(".exe"); + if target.exists() || + target.with_extension("exe").exists() || + target.join(cmd_alt).exists() { + return Some(target); } } return None; diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs index 56be2ccb235..884cc7da8ea 100644 --- a/src/bootstrap/step.rs +++ b/src/bootstrap/step.rs @@ -8,10 +8,28 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Definition of steps of the build system. +//! +//! This is where some of the real meat of rustbuild is located, in how we +//! define targets and the dependencies amongst them. This file can sort of be +//! viewed as just defining targets in a makefile which shell out to predefined +//! functions elsewhere about how to execute the target. +//! +//! The primary function here you're likely interested in is the `build_rules` +//! function. This will create a `Rules` structure which basically just lists +//! everything that rustbuild can do. Each rule has a human-readable name, a +//! path associated with it, some dependencies, and then a closure of how to +//! actually perform the rule. +//! +//! All steps below are defined in self-contained units, so adding a new target +//! to the build system should just involve adding the meta information here +//! along with the actual implementation elsewhere. You can find more comments +//! about how to define rules themselves below. 
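The new step.rs module docs above say that adding a target should only involve registering its metadata through the builder-style `Rules` API, with the real work implemented elsewhere. The snippet below is a self-contained toy that mirrors that register-on-`Drop` builder shape; it is illustrative only (the actual types live in `src/bootstrap/step.rs` and also carry kinds, dependencies, and run closures, as the rest of this patch shows):

```rust
// Toy mirror of the rule-registration pattern described above: `Rules` hands
// out a builder, the builder is configured fluently, and the finished rule is
// pushed onto the internal list when the builder is dropped.
struct Rule {
    name: &'static str,
    path: &'static str,
    host: bool,
}

struct Rules {
    rules: Vec<Rule>,
}

struct RuleBuilder<'a> {
    rules: &'a mut Rules,
    rule: Option<Rule>,
}

impl Rules {
    fn new() -> Rules {
        Rules { rules: Vec::new() }
    }

    fn build(&mut self, name: &'static str, path: &'static str) -> RuleBuilder {
        RuleBuilder {
            rule: Some(Rule { name: name, path: path, host: false }),
            rules: self,
        }
    }
}

impl<'a> RuleBuilder<'a> {
    fn host(mut self, host: bool) -> RuleBuilder<'a> {
        self.rule.as_mut().unwrap().host = host;
        self
    }
}

impl<'a> Drop for RuleBuilder<'a> {
    fn drop(&mut self) {
        // Registration happens here, on drop, just like the real builder.
        self.rules.rules.push(self.rule.take().unwrap());
    }
}

fn main() {
    let mut rules = Rules::new();
    rules.build("llvm", "src/llvm").host(true);
    rules.build("dummy", "path/to/nowhere");
    for rule in &rules.rules {
        println!("{} ({}), host-only: {}", rule.name, rule.path, rule.host);
    }
}
```

In the patch itself the same shape shows up as `rules.build("llvm", "src/llvm").host(true).dep(...).run(...)`.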
+ use std::collections::{HashMap, HashSet}; use std::mem; -use check; +use check::{self, TestKind}; use compile; use dist; use doc; @@ -20,36 +38,6 @@ use install; use native; use {Compiler, Build, Mode}; -#[derive(PartialEq, Eq, Hash, Clone, Debug)] -struct Step<'a> { - name: &'a str, - stage: u32, - host: &'a str, - target: &'a str, -} - -impl<'a> Step<'a> { - fn name(&self, name: &'a str) -> Step<'a> { - Step { name: name, ..*self } - } - - fn stage(&self, stage: u32) -> Step<'a> { - Step { stage: stage, ..*self } - } - - fn host(&self, host: &'a str) -> Step<'a> { - Step { host: host, ..*self } - } - - fn target(&self, target: &'a str) -> Step<'a> { - Step { target: target, ..*self } - } - - fn compiler(&self) -> Compiler<'a> { - Compiler::new(self.stage, self.host) - } -} - pub fn run(build: &Build) { let rules = build_rules(build); let steps = rules.plan(); @@ -57,14 +45,91 @@ pub fn run(build: &Build) { pub fn build_rules(build: &Build) -> Rules { - let mut rules: Rules = Rules::new(build); + let mut rules = Rules::new(build); + + // This is the first rule that we're going to define for rustbuild, which is + // used to compile LLVM itself. All rules are added through the `rules` + // structure created above and are configured through a builder-style + // interface. + // + // First up we see the `build` method. This represents a rule that's part of + // the top-level `build` subcommand. For example `./x.py build` is what this + // is associated with. Note that this is normally only relevant if you flag + // a rule as `default`, which we'll talk about later. + // + // Next up we'll see two arguments to this method: + // + // * `llvm` - this is the "human readable" name of this target. This name is + // not accessed anywhere outside this file itself (e.g. not in + // the CLI nor elsewhere in rustbuild). The purpose of this is to + // easily define dependencies between rules. That is, other rules + // will depend on this with the name "llvm". + // * `src/llvm` - this is the relevant path to the rule that we're working + // with. This path is the engine behind how commands like + // `./x.py build src/llvm` work. This should typically point + // to the relevant component, but if there's not really a + // path to be assigned here you can pass something like + // `path/to/nowhere` to ignore it. + // + // After we create the rule with the `build` method we can then configure + // various aspects of it. For example this LLVM rule uses `.host(true)` to + // flag that it's a rule only for host targets. In other words, LLVM isn't + // compiled for targets configured through `--target` (e.g. those we're just + // building a standard library for). + // + // Next up the `dep` method will add a dependency to this rule. The closure + // is yielded the step that represents executing the `llvm` rule itself + // (containing information like stage, host, target, ...) and then it must + // return a target that the step depends on. Here LLVM is actually + // interesting where a cross-compiled LLVM depends on the host LLVM, but + // otherwise it has no dependencies. + // + // To handle this we do a bit of dynamic dispatch to see what the dependency + // is. If we're building a LLVM for the build triple, then we don't actually + // have any dependencies! To do that we return a dependency on the "dummy" + // target which does nothing. + // + // If we're building a cross-compiled LLVM, however, we need to assemble the +// libraries from the previous compiler. 
This step has the same name as + // ours (llvm) but we want it for a different target, so we use the + // builder-style methods on `Step` to configure this target to the build + // triple. + // + // Finally, to finish off this rule, we define how to actually execute it. + // That logic is all defined in the `native` module so we just delegate to + // the relevant function there. The argument to the closure passed to `run` + // is a `Step` (defined below) which encapsulates information like the + // stage, target, host, etc. + rules.build("llvm", "src/llvm") + .host(true) + .dep(move |s| { + if s.target == build.config.build { + dummy(s, build) + } else { + s.target(&build.config.build) + } + }) + .run(move |s| native::llvm(build, s.target)); + + // Ok! After that example rule that's hopefully enough to explain what's + // going on here. You can check out the API docs below and also see a bunch + // more examples of rules directly below as well. + // dummy rule to do nothing, useful when a dep maps to no deps rules.build("dummy", "path/to/nowhere"); - fn dummy<'a>(s: &Step<'a>, build: &'a Build) -> Step<'a> { - s.name("dummy").stage(0) - .target(&build.config.build) - .host(&build.config.build) - } + + // the compiler with no target libraries ready to go + rules.build("rustc", "src/rustc") + .dep(move |s| { + if s.stage == 0 { + dummy(s, build) + } else { + s.name("librustc") + .host(&build.config.build) + .stage(s.stage - 1) + } + }) + .run(move |s| compile::assemble_rustc(build, s.stage, s.target)); // Helper for loading an entire DAG of crates, rooted at `name` let krates = |name: &str| { @@ -85,21 +150,6 @@ pub fn build_rules(build: &Build) -> Rules { return ret }; - rules.build("rustc", "path/to/nowhere") - .dep(move |s| { - if s.stage == 0 { - dummy(s, build) - } else { - s.name("librustc") - .host(&build.config.build) - .stage(s.stage - 1) - } - }) - .run(move |s| compile::assemble_rustc(build, s.stage, s.target)); - rules.build("llvm", "src/llvm") - .host(true) - .run(move |s| native::llvm(build, s.target)); - // ======================================================================== // Crate compilations // @@ -268,37 +318,55 @@ pub fn build_rules(build: &Build) -> Rules { rules.test(&krate.test_step, path) .dep(|s| s.name("libtest")) .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libstd, Some(&krate.name))); + Mode::Libstd, TestKind::Test, + Some(&krate.name))); } rules.test("check-std-all", "path/to/nowhere") .dep(|s| s.name("libtest")) .default(true) - .run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Libstd, - None)); + .run(move |s| check::krate(build, &s.compiler(), s.target, + Mode::Libstd, TestKind::Test, None)); + + // std benchmarks + for (krate, path, _default) in krates("std_shim") { + rules.bench(&krate.bench_step, path) + .dep(|s| s.name("libtest")) + .run(move |s| check::krate(build, &s.compiler(), s.target, + Mode::Libstd, TestKind::Bench, + Some(&krate.name))); + } + rules.bench("bench-std-all", "path/to/nowhere") + .dep(|s| s.name("libtest")) + .default(true) + .run(move |s| check::krate(build, &s.compiler(), s.target, + Mode::Libstd, TestKind::Bench, None)); + for (krate, path, _default) in krates("test_shim") { rules.test(&krate.test_step, path) .dep(|s| s.name("libtest")) .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Libtest, Some(&krate.name))); + Mode::Libtest, TestKind::Test, + Some(&krate.name))); } rules.test("check-test-all", "path/to/nowhere") .dep(|s| s.name("libtest")) .default(true) - .run(move 
|s| check::krate(build, &s.compiler(), s.target, Mode::Libtest, - None)); + .run(move |s| check::krate(build, &s.compiler(), s.target, + Mode::Libtest, TestKind::Test, None)); for (krate, path, _default) in krates("rustc-main") { rules.test(&krate.test_step, path) .dep(|s| s.name("librustc")) .host(true) .run(move |s| check::krate(build, &s.compiler(), s.target, - Mode::Librustc, Some(&krate.name))); + Mode::Librustc, TestKind::Test, + Some(&krate.name))); } rules.test("check-rustc-all", "path/to/nowhere") .dep(|s| s.name("librustc")) .default(true) .host(true) - .run(move |s| check::krate(build, &s.compiler(), s.target, Mode::Librustc, - None)); + .run(move |s| check::krate(build, &s.compiler(), s.target, + Mode::Librustc, TestKind::Test, None)); rules.test("check-linkchecker", "src/tools/linkchecker") .dep(|s| s.name("tool-linkchecker")) @@ -312,10 +380,10 @@ pub fn build_rules(build: &Build) -> Rules { .host(true) .run(move |s| check::cargotest(build, s.stage, s.target)); rules.test("check-tidy", "src/tools/tidy") - .dep(|s| s.name("tool-tidy")) + .dep(|s| s.name("tool-tidy").stage(0)) .default(true) .host(true) - .run(move |s| check::tidy(build, s.stage, s.target)); + .run(move |s| check::tidy(build, 0, s.target)); rules.test("check-error-index", "src/tools/error_index_generator") .dep(|s| s.name("libstd")) .dep(|s| s.name("tool-error-index").host(s.host)) @@ -327,6 +395,10 @@ pub fn build_rules(build: &Build) -> Rules { .default(true) .host(true) .run(move |s| check::docs(build, &s.compiler())); + rules.test("check-distcheck", "distcheck") + .dep(|s| s.name("dist-src")) + .run(move |_| check::distcheck(build)); + rules.build("test-helpers", "src/rt/rust_test_helpers.c") .run(move |s| native::test_helpers(build, s.target)); @@ -427,21 +499,98 @@ pub fn build_rules(build: &Build) -> Rules { .default(true) .dep(|s| s.name("default:doc")) .run(move |s| dist::docs(build, s.stage, s.target)); + rules.dist("dist-analysis", "src/libstd") + .dep(|s| s.name("dist-std")) + .default(true) + .run(move |s| dist::analysis(build, &s.compiler(), s.target)); rules.dist("install", "src") .dep(|s| s.name("default:dist")) .run(move |s| install::install(build, s.stage, s.target)); rules.verify(); - return rules + return rules; + + fn dummy<'a>(s: &Step<'a>, build: &'a Build) -> Step<'a> { + s.name("dummy").stage(0) + .target(&build.config.build) + .host(&build.config.build) + } +} + +#[derive(PartialEq, Eq, Hash, Clone, Debug)] +struct Step<'a> { + /// Human readable name of the rule this step is executing. Possible names + /// are all defined above in `build_rules`. + name: &'a str, + + /// The stage this step is executing in. This is typically 0, 1, or 2. + stage: u32, + + /// This step will likely involve a compiler, and the target that compiler + /// itself is built for is called the host, this variable. Typically this is + /// the target of the build machine itself. + host: &'a str, + + /// The target that this step represents generating. If you're building a + /// standard library for a new suite of targets, for example, this'll be set + /// to those targets. + target: &'a str, +} + +impl<'a> Step<'a> { + /// Creates a new step which is the same as this, except has a new name. + fn name(&self, name: &'a str) -> Step<'a> { + Step { name: name, ..*self } + } + + /// Creates a new step which is the same as this, except has a new stage. + fn stage(&self, stage: u32) -> Step<'a> { + Step { stage: stage, ..*self } + } + + /// Creates a new step which is the same as this, except has a new host. 
+ fn host(&self, host: &'a str) -> Step<'a> { + Step { host: host, ..*self } + } + + /// Creates a new step which is the same as this, except has a new target. + fn target(&self, target: &'a str) -> Step<'a> { + Step { target: target, ..*self } + } + + /// Returns the `Compiler` structure that this step corresponds to. + fn compiler(&self) -> Compiler<'a> { + Compiler::new(self.stage, self.host) + } } struct Rule<'a> { + /// The human readable name of this target, defined in `build_rules`. name: &'a str, + + /// The path associated with this target, used in the `./x.py` driver for + /// easy and ergonomic specification of what to do. path: &'a str, + + /// The "kind" of top-level command that this rule is associated with, only + /// relevant if this is a default rule. kind: Kind, + + /// List of dependencies this rule has. Each dependency is a function from a + /// step that's being executed to another step that should be executed. deps: Vec<Box<Fn(&Step<'a>) -> Step<'a> + 'a>>, + + /// How to actually execute this rule. Takes a step with contextual + /// information and then executes it. run: Box<Fn(&Step<'a>) + 'a>, + + /// Whether or not this is a "default" rule. That basically means that if + /// you run, for example, `./x.py test` whether it's included or not. default: bool, + + /// Whether or not this is a "host" rule, or in other words whether this is + /// only intended for compiler hosts and not for targets that are being + /// generated. host: bool, } @@ -449,6 +598,7 @@ struct Rule<'a> { enum Kind { Build, Test, + Bench, Dist, Doc, } @@ -467,6 +617,8 @@ impl<'a> Rule<'a> { } } +/// Builder pattern returned from the various methods on `Rules` which will add +/// the rule to the internal list on `Drop`. struct RuleBuilder<'a: 'b, 'b> { rules: &'b mut Rules<'a>, rule: Rule<'a>, @@ -528,21 +680,35 @@ impl<'a> Rules<'a> { } } + /// Creates a new rule of `Kind::Build` with the specified human readable + /// name and path associated with it. + /// + /// The builder returned should be configured further with information such + /// as how to actually run this rule. fn build<'b>(&'b mut self, name: &'a str, path: &'a str) -> RuleBuilder<'a, 'b> { self.rule(name, path, Kind::Build) } + /// Same as `build`, but for `Kind::Test`. fn test<'b>(&'b mut self, name: &'a str, path: &'a str) -> RuleBuilder<'a, 'b> { self.rule(name, path, Kind::Test) } + /// Same as `build`, but for `Kind::Bench`. + fn bench<'b>(&'b mut self, name: &'a str, path: &'a str) + -> RuleBuilder<'a, 'b> { + self.rule(name, path, Kind::Bench) + } + + /// Same as `build`, but for `Kind::Doc`. fn doc<'b>(&'b mut self, name: &'a str, path: &'a str) -> RuleBuilder<'a, 'b> { self.rule(name, path, Kind::Doc) } + /// Same as `build`, but for `Kind::Dist`. fn dist<'b>(&'b mut self, name: &'a str, path: &'a str) -> RuleBuilder<'a, 'b> { self.rule(name, path, Kind::Dist) @@ -583,6 +749,7 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd? "build" => Kind::Build, "doc" => Kind::Doc, "test" => Kind::Test, + "bench" => Kind::Bench, "dist" => Kind::Dist, _ => return, }; @@ -602,10 +769,36 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd? /// Construct the top-level build steps that we're going to be executing, /// given the subcommand that our build is performing. fn plan(&self) -> Vec<Step<'a>> { + // Ok, the logic here is pretty subtle, and involves quite a few + // conditionals. The basic idea here is to: + // + // 1. First, filter all our rules to the relevant ones. 
This means that + // the command specified corresponds to one of our `Kind` variants, + // and we filter all rules based on that. + // + // 2. Next, we determine which rules we're actually executing. If a + // number of path filters were specified on the command line we look + // for those, otherwise we look for anything tagged `default`. + // + // 3. Finally, we generate some steps with host and target information. + // + // The last step is by far the most complicated and subtle. The basic + // thinking here is that we want to take the cartesian product of + // specified hosts and targets and build rules with that. The list of + // hosts and targets, if not specified, come from how this build was + // configured. If the rule we're looking at is a host-only rule then we + // ignore the list of targets and instead consider the list of hosts + // also the list of targets. + // + // Once the host and target lists are generated we take the cartesian + // product of the two and then create a step based off them. Note that + // the stage each step is associated with was specified with the `--step` + // flag on the command line. let (kind, paths) = match self.build.flags.cmd { Subcommand::Build { ref paths } => (Kind::Build, &paths[..]), Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]), Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]), + Subcommand::Bench { ref paths, test_args: _ } => (Kind::Bench, &paths[..]), Subcommand::Dist { install } => { if install { return vec![self.sbuild.name("install")] @@ -631,7 +824,18 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd? } else { &self.build.config.target }; - let arr = if rule.host {hosts} else {targets}; + // If --target was specified but --host wasn't specified, don't run + // any host-only tests + let arr = if rule.host { + if self.build.flags.target.len() > 0 && + self.build.flags.host.len() == 0 { + &hosts[..0] + } else { + hosts + } + } else { + targets + }; hosts.iter().flat_map(move |host| { arr.iter().map(move |target| { @@ -672,6 +876,15 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd? } } + /// Performs topological sort of dependencies rooted at the `step` + /// specified, pushing all results onto the `order` vector provided. + /// + /// In other words, when this method returns, the `order` vector will + /// contain a list of steps which if executed in order will eventually + /// complete the `step` specified as well. + /// + /// The `added` set specified here is the set of steps that are already + /// present in `order` (and hence don't need to be added again). fn fill(&self, step: Step<'a>, order: &mut Vec<Step<'a>>, diff --git a/src/bootstrap/util.rs b/src/bootstrap/util.rs index e028c522366..cb5b456a0f2 100644 --- a/src/bootstrap/util.rs +++ b/src/bootstrap/util.rs @@ -18,6 +18,7 @@ use std::ffi::OsString; use std::fs; use std::path::{Path, PathBuf}; use std::process::Command; +use std::time::Instant; use filetime::FileTime; @@ -189,3 +190,19 @@ pub fn push_exe_path(mut buf: PathBuf, components: &[&str]) -> PathBuf { buf } + +pub struct TimeIt(Instant); + +/// Returns an RAII structure that prints out how long it took to drop. 
+pub fn timeit() -> TimeIt { + TimeIt(Instant::now()) +} + +impl Drop for TimeIt { + fn drop(&mut self) { + let time = self.0.elapsed(); + println!("\tfinished in {}.{:03}", + time.as_secs(), + time.subsec_nanos() / 1_000_000); + } +} diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs index 38844fb6c9e..07f9c91d3c7 100644 --- a/src/build_helper/lib.rs +++ b/src/build_helper/lib.rs @@ -21,7 +21,8 @@ pub fn run(cmd: &mut Command) { pub fn run_silent(cmd: &mut Command) { let status = match cmd.status() { Ok(status) => status, - Err(e) => fail(&format!("failed to execute command: {}", e)), + Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}", + cmd, e)), }; if !status.success() { fail(&format!("command did not execute successfully: {:?}\n\ @@ -63,7 +64,8 @@ pub fn cc2ar(cc: &Path, target: &str) -> Option { pub fn output(cmd: &mut Command) -> String { let output = match cmd.stderr(Stdio::inherit()).output() { Ok(status) => status, - Err(e) => fail(&format!("failed to execute command: {}", e)), + Err(e) => fail(&format!("failed to execute command: {:?}\nerror: {}", + cmd, e)), }; if !output.status.success() { panic!("command did not execute successfully: {:?}\n\ diff --git a/src/ci/docker/arm-android/Dockerfile b/src/ci/docker/arm-android/Dockerfile index c5b70c227c4..121c0263cbc 100644 --- a/src/ci/docker/arm-android/Dockerfile +++ b/src/ci/docker/arm-android/Dockerfile @@ -9,7 +9,6 @@ RUN dpkg --add-architecture i386 && \ curl \ ca-certificates \ python2.7 \ - python-minimal \ git \ cmake \ ccache \ @@ -39,8 +38,7 @@ ENV RUST_CONFIGURE_ARGS \ --arm-linux-androideabi-ndk=/android/ndk-arm-9 \ --armv7-linux-androideabi-ndk=/android/ndk-arm-9 \ --i686-linux-android-ndk=/android/ndk-x86-9 \ - --aarch64-linux-android-ndk=/android/ndk-aarch64 \ - --enable-rustbuild -ENV RUST_CHECK_TARGET check-stage2-android + --aarch64-linux-android-ndk=/android/ndk-aarch64 +ENV XPY_CHECK test --target arm-linux-androideabi RUN mkdir /tmp/obj RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/cross/Dockerfile b/src/ci/docker/cross/Dockerfile index d8af878a958..b7b23d74c9d 100644 --- a/src/ci/docker/cross/Dockerfile +++ b/src/ci/docker/cross/Dockerfile @@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ curl \ ca-certificates \ python2.7 \ - python-minimal \ git \ cmake \ ccache \ diff --git a/src/ci/docker/run.sh b/src/ci/docker/run.sh index c5b1d00fb7c..ff5345d3aac 100755 --- a/src/ci/docker/run.sh +++ b/src/ci/docker/run.sh @@ -19,17 +19,21 @@ ci_dir="`dirname $docker_dir`" src_dir="`dirname $ci_dir`" root_dir="`dirname $src_dir`" -docker build \ +docker \ + build \ --rm \ -t rust-ci \ "`dirname "$script"`/$image" mkdir -p $HOME/.ccache mkdir -p $HOME/.cargo +mkdir -p $root_dir/obj -exec docker run \ +exec docker \ + run \ --volume "$root_dir:/checkout:ro" \ - --workdir /tmp/obj \ + --volume "$root_dir/obj:/checkout/obj" \ + --workdir /checkout/obj \ --env SRC=/checkout \ --env CCACHE_DIR=/ccache \ --volume "$HOME/.ccache:/ccache" \ diff --git a/src/ci/docker/x86_64-freebsd/Dockerfile b/src/ci/docker/x86_64-freebsd/Dockerfile index dc16c39961c..a3a52f9e6ff 100644 --- a/src/ci/docker/x86_64-freebsd/Dockerfile +++ b/src/ci/docker/x86_64-freebsd/Dockerfile @@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ curl \ ca-certificates \ python2.7 \ - python-minimal \ git \ cmake \ ccache \ @@ -23,7 +22,7 @@ ENV \ AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-ar \ CC_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-gcc -ENV 
RUST_CONFIGURE_ARGS --target=x86_64-unknown-freebsd --enable-rustbuild +ENV RUST_CONFIGURE_ARGS --target=x86_64-unknown-freebsd ENV RUST_CHECK_TARGET "" RUN mkdir /tmp/obj RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-cargotest/Dockerfile b/src/ci/docker/x86_64-gnu-cargotest/Dockerfile index 1db01f2b48d..107e2bf8a12 100644 --- a/src/ci/docker/x86_64-gnu-cargotest/Dockerfile +++ b/src/ci/docker/x86_64-gnu-cargotest/Dockerfile @@ -7,14 +7,14 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ curl \ ca-certificates \ python2.7 \ - python-minimal \ git \ cmake \ ccache \ libssl-dev \ sudo -ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-rustbuild +ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu ENV RUST_CHECK_TARGET check-cargotest +ENV NO_VENDOR 1 RUN mkdir /tmp/obj RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile b/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile index ca06940ae5e..c27e3d1325f 100644 --- a/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile +++ b/src/ci/docker/x86_64-gnu-llvm-3.7/Dockerfile @@ -7,7 +7,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ curl \ ca-certificates \ python2.7 \ - python2.7-minimal \ git \ cmake \ ccache \ @@ -19,7 +18,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ ENV RUST_CONFIGURE_ARGS \ --build=x86_64-unknown-linux-gnu \ - --enable-rustbuild \ --llvm-root=/usr/lib/llvm-3.7 ENV RUST_CHECK_TARGET check RUN mkdir /tmp/obj diff --git a/src/ci/docker/x86_64-gnu-rustbuild/Dockerfile b/src/ci/docker/x86_64-gnu-make/Dockerfile similarity index 73% rename from src/ci/docker/x86_64-gnu-rustbuild/Dockerfile rename to src/ci/docker/x86_64-gnu-make/Dockerfile index d4d0492e2a2..93229b2a010 100644 --- a/src/ci/docker/x86_64-gnu-rustbuild/Dockerfile +++ b/src/ci/docker/x86_64-gnu-make/Dockerfile @@ -7,14 +7,13 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ curl \ ca-certificates \ python2.7 \ - python-minimal \ git \ cmake \ ccache \ sudo \ gdb -ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --enable-rustbuild +ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-rustbuild ENV RUST_CHECK_TARGET check RUN mkdir /tmp/obj RUN chmod 777 /tmp/obj diff --git a/src/ci/docker/x86_64-musl/Dockerfile b/src/ci/docker/x86_64-musl/Dockerfile index 1afaef2e056..967940fb1f3 100644 --- a/src/ci/docker/x86_64-musl/Dockerfile +++ b/src/ci/docker/x86_64-musl/Dockerfile @@ -20,8 +20,10 @@ RUN sh /build/build-musl.sh && rm -rf /build ENV RUST_CONFIGURE_ARGS \ --target=x86_64-unknown-linux-musl \ - --musl-root=/musl-x86_64 + --musl-root-x86_64=/musl-x86_64 ENV RUST_CHECK_TARGET check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu +ENV PATH=$PATH:/musl-x86_64/bin +ENV XPY_CHECK test --target x86_64-unknown-linux-musl RUN mkdir /tmp/obj RUN chmod 777 /tmp/obj diff --git a/src/ci/run.sh b/src/ci/run.sh index da238dddeca..10f2d15da34 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -14,12 +14,20 @@ set -e if [ "$LOCAL_USER_ID" != "" ]; then useradd --shell /bin/bash -u $LOCAL_USER_ID -o -c "" -m user export HOME=/home/user - export LOCAL_USER_ID= - exec sudo -E -u user env PATH=$PATH "$0" + unset LOCAL_USER_ID + exec su --preserve-environment -c "env PATH=$PATH \"$0\"" user fi if [ "$NO_LLVM_ASSERTIONS" = "" ]; then - LLVM_ASSERTIONS=--enable-llvm-assertions + ENABLE_LLVM_ASSERTIONS=--enable-llvm-assertions +fi + +if [ "$NO_VENDOR" = "" ]; then + ENABLE_VENDOR=--enable-vendor +fi + +if [ 
"$NO_CCACHE" = "" ]; then + ENABLE_CCACHE=--enable-ccache fi set -ex @@ -28,9 +36,9 @@ $SRC/configure \ --disable-manage-submodules \ --enable-debug-assertions \ --enable-quiet-tests \ - --enable-ccache \ - --enable-vendor \ - $LLVM_ASSERTIONS \ + $ENABLE_CCACHE \ + $ENABLE_VENDOR \ + $ENABLE_LLVM_ASSERTIONS \ $RUST_CONFIGURE_ARGS if [ "$TRAVIS_OS_NAME" = "osx" ]; then @@ -41,4 +49,8 @@ fi make -j $ncpus tidy make -j $ncpus -exec make $RUST_CHECK_TARGET -j $ncpus +if [ ! -z "$XPY_CHECK" ]; then + exec python2.7 $SRC/x.py $XPY_CHECK +else + exec make $RUST_CHECK_TARGET -j $ncpus +fi diff --git a/src/compiler-rt b/src/compiler-rt index 3bc0272cab9..a8fc4c169fa 160000 --- a/src/compiler-rt +++ b/src/compiler-rt @@ -1 +1 @@ -Subproject commit 3bc0272cab9fdcfc2ef4df9625ec3c9d5909db79 +Subproject commit a8fc4c169fac43a5dc204d4fd56ddb1739f8c178 diff --git a/src/doc/book/ffi.md b/src/doc/book/ffi.md index 7510cd0b3b5..b53af694428 100644 --- a/src/doc/book/ffi.md +++ b/src/doc/book/ffi.md @@ -662,26 +662,31 @@ attribute turns off Rust's name mangling, so that it is easier to link to. It’s important to be mindful of `panic!`s when working with FFI. A `panic!` across an FFI boundary is undefined behavior. If you’re writing code that may -panic, you should run it in another thread, so that the panic doesn’t bubble up -to C: +panic, you should run it in a closure with [`catch_unwind()`]: ```rust -use std::thread; +use std::panic::catch_unwind; #[no_mangle] pub extern fn oh_no() -> i32 { - let h = thread::spawn(|| { + let result = catch_unwind(|| { panic!("Oops!"); }); - - match h.join() { - Ok(_) => 1, - Err(_) => 0, + match result { + Ok(_) => 0, + Err(_) => 1, } } -# fn main() {} + +fn main() {} ``` +Please note that [`catch_unwind()`] will only catch unwinding panics, not +those who abort the process. See the documentation of [`catch_unwind()`] +for more information. + +[`catch_unwind()`]: https://doc.rust-lang.org/std/panic/fn.catch_unwind.html + # Representing opaque structs Sometimes, a C library wants to provide a pointer to something, but not let you diff --git a/src/doc/book/testing.md b/src/doc/book/testing.md index 14a05102b9a..ebeb9923197 100644 --- a/src/doc/book/testing.md +++ b/src/doc/book/testing.md @@ -589,11 +589,11 @@ please see the [Documentation chapter](documentation.html). # Testing and concurrency -One thing that is important to note when writing tests are run concurrently -using threads. For this reason you should take care that your tests are written -in such a way as to not depend on each-other, or on any shared state. "Shared -state" can also include the environment, such as the current working directory, -or environment variables. +One thing that is important to note when writing tests is that they may be run +concurrently using threads. For this reason you should take care that your tests +are written in such a way as to not depend on each-other, or on any shared +state. "Shared state" can also include the environment, such as the current +working directory, or environment variables. 
If this is an issue it is possible to control this concurrency, either by setting the environment variable `RUST_TEST_THREADS`, or by passing the argument diff --git a/src/doc/book/variable-bindings.md b/src/doc/book/variable-bindings.md index 54316649c71..37b6c0513fc 100644 --- a/src/doc/book/variable-bindings.md +++ b/src/doc/book/variable-bindings.md @@ -47,7 +47,7 @@ let x: i32 = 5; ``` If I asked you to read this out loud to the rest of the class, you’d say “`x` -is a binding with the type `i32` and the value `five`.” +is a binding with the type `i32` and the value `5`.” In this case we chose to represent `x` as a 32-bit signed integer. Rust has many different primitive integer types. They begin with `i` for signed integers diff --git a/src/doc/index.md b/src/doc/index.md index f8a1ec134d9..71dfcf0b067 100644 --- a/src/doc/index.md +++ b/src/doc/index.md @@ -17,7 +17,7 @@ the language. [**The Rust Reference**][ref]. While Rust does not have a specification, the reference tries to describe its working in -detail. It tends to be out of date. +detail. It is accurate, but not necessarily complete. [**Standard Library API Reference**][api]. Documentation for the standard library. diff --git a/src/doc/reference.md b/src/doc/reference.md index 4fbe5183967..bf286aaec4b 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -603,7 +603,8 @@ syntax named by _designator_. Valid designators are: * `ty`: a [type](#types) * `ident`: an [identifier](#identifiers) * `path`: a [path](#paths) -* `tt`: either side of the `=>` in macro rules +* `tt`: a token tree (a single [token](#tokens) or a sequence of token trees surrounded + by matching `()`, `[]`, or `{}`) * `meta`: the contents of an [attribute](#attributes) In the transcriber, the @@ -740,13 +741,14 @@ There are several kinds of item: * [`extern crate` declarations](#extern-crate-declarations) * [`use` declarations](#use-declarations) * [modules](#modules) -* [functions](#functions) +* [function definitions](#functions) +* [`extern` blocks](#external-blocks) * [type definitions](grammar.html#type-definitions) -* [structs](#structs) -* [enumerations](#enumerations) +* [struct definitions](#structs) +* [enumeration definitions](#enumerations) * [constant items](#constant-items) * [static items](#static-items) -* [traits](#traits) +* [trait definitions](#traits) * [implementations](#implementations) Some items form an implicit scope for the declaration of sub-items. In other @@ -2462,11 +2464,6 @@ The currently implemented features of the reference compiler are: * `unboxed_closures` - Rust's new closure design, which is currently a work in progress feature with many known bugs. -* `unmarked_api` - Allows use of items within a `#![staged_api]` crate - which have not been marked with a stability marker. - Such items should not be allowed by the compiler to exist, - so if you need this there probably is a compiler bug. - * `allow_internal_unstable` - Allows `macro_rules!` macros to be tagged with the `#[allow_internal_unstable]` attribute, designed to allow `std` macros to call diff --git a/src/etc/generate-deriving-span-tests.py b/src/etc/generate-deriving-span-tests.py index 790fc894287..6642da858e5 100755 --- a/src/etc/generate-deriving-span-tests.py +++ b/src/etc/generate-deriving-span-tests.py @@ -37,8 +37,6 @@ TEMPLATE = """// Copyright {year} The Rust Project Developers. 
See the COPYRIGHT // This file was auto-generated using 'src/etc/generate-deriving-span-tests.py' -extern crate rand; - {error_deriving} struct Error; {code} @@ -106,7 +104,6 @@ STRUCT = 2 ALL = STRUCT | ENUM traits = { - 'Zero': (STRUCT, [], 1), 'Default': (STRUCT, [], 1), 'FromPrimitive': (0, [], 0), # only works for C-like enums @@ -116,7 +113,7 @@ traits = { for (trait, supers, errs) in [('Clone', [], 1), ('PartialEq', [], 2), - ('PartialOrd', ['PartialEq'], 8), + ('PartialOrd', ['PartialEq'], 9), ('Eq', ['PartialEq'], 1), ('Ord', ['Eq', 'PartialOrd', 'PartialEq'], 1), ('Debug', [], 1), diff --git a/src/grammar/verify.rs b/src/grammar/verify.rs index 48be58f731c..919fc98e438 100644 --- a/src/grammar/verify.rs +++ b/src/grammar/verify.rs @@ -23,7 +23,6 @@ use std::fs::File; use std::io::{BufRead, Read}; use std::path::Path; -use syntax::parse; use syntax::parse::lexer; use rustc::dep_graph::DepGraph; use rustc::session::{self, config}; @@ -31,15 +30,16 @@ use rustc::middle::cstore::DummyCrateStore; use std::rc::Rc; use syntax::ast; -use syntax::ast::Name; use syntax::codemap; use syntax::parse::token::{self, BinOpToken, DelimToken, Lit, Token}; use syntax::parse::lexer::TokenAndSpan; use syntax_pos::Pos; +use syntax::symbol::{Symbol, keywords}; + fn parse_token_list(file: &str) -> HashMap { fn id() -> token::Token { - Token::Ident(ast::Ident::with_empty_ctxt(Name(0))) + Token::Ident(ast::Ident::with_empty_ctxt(keywords::Invalid.name())) } let mut res = HashMap::new(); @@ -65,7 +65,7 @@ fn parse_token_list(file: &str) -> HashMap { "SHL" => Token::BinOp(BinOpToken::Shl), "LBRACE" => Token::OpenDelim(DelimToken::Brace), "RARROW" => Token::RArrow, - "LIT_STR" => Token::Literal(Lit::Str_(Name(0)), None), + "LIT_STR" => Token::Literal(Lit::Str_(keywords::Invalid.name()), None), "DOTDOT" => Token::DotDot, "MOD_SEP" => Token::ModSep, "DOTDOTDOT" => Token::DotDotDot, @@ -75,21 +75,22 @@ fn parse_token_list(file: &str) -> HashMap { "ANDAND" => Token::AndAnd, "AT" => Token::At, "LBRACKET" => Token::OpenDelim(DelimToken::Bracket), - "LIT_STR_RAW" => Token::Literal(Lit::StrRaw(Name(0), 0), None), + "LIT_STR_RAW" => Token::Literal(Lit::StrRaw(keywords::Invalid.name(), 0), None), "RPAREN" => Token::CloseDelim(DelimToken::Paren), "SLASH" => Token::BinOp(BinOpToken::Slash), "COMMA" => Token::Comma, - "LIFETIME" => Token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))), + "LIFETIME" => Token::Lifetime( + ast::Ident::with_empty_ctxt(keywords::Invalid.name())), "CARET" => Token::BinOp(BinOpToken::Caret), "TILDE" => Token::Tilde, "IDENT" => id(), "PLUS" => Token::BinOp(BinOpToken::Plus), - "LIT_CHAR" => Token::Literal(Lit::Char(Name(0)), None), - "LIT_BYTE" => Token::Literal(Lit::Byte(Name(0)), None), + "LIT_CHAR" => Token::Literal(Lit::Char(keywords::Invalid.name()), None), + "LIT_BYTE" => Token::Literal(Lit::Byte(keywords::Invalid.name()), None), "EQ" => Token::Eq, "RBRACKET" => Token::CloseDelim(DelimToken::Bracket), "COMMENT" => Token::Comment, - "DOC_COMMENT" => Token::DocComment(Name(0)), + "DOC_COMMENT" => Token::DocComment(keywords::Invalid.name()), "DOT" => Token::Dot, "EQEQ" => Token::EqEq, "NE" => Token::Ne, @@ -99,9 +100,9 @@ fn parse_token_list(file: &str) -> HashMap { "BINOP" => Token::BinOp(BinOpToken::Plus), "POUND" => Token::Pound, "OROR" => Token::OrOr, - "LIT_INTEGER" => Token::Literal(Lit::Integer(Name(0)), None), + "LIT_INTEGER" => Token::Literal(Lit::Integer(keywords::Invalid.name()), None), "BINOPEQ" => Token::BinOpEq(BinOpToken::Plus), - "LIT_FLOAT" => 
Token::Literal(Lit::Float(Name(0)), None), + "LIT_FLOAT" => Token::Literal(Lit::Float(keywords::Invalid.name()), None), "WHITESPACE" => Token::Whitespace, "UNDERSCORE" => Token::Underscore, "MINUS" => Token::BinOp(BinOpToken::Minus), @@ -111,10 +112,11 @@ fn parse_token_list(file: &str) -> HashMap { "OR" => Token::BinOp(BinOpToken::Or), "GT" => Token::Gt, "LE" => Token::Le, - "LIT_BINARY" => Token::Literal(Lit::ByteStr(Name(0)), None), - "LIT_BINARY_RAW" => Token::Literal(Lit::ByteStrRaw(Name(0), 0), None), + "LIT_BINARY" => Token::Literal(Lit::ByteStr(keywords::Invalid.name()), None), + "LIT_BINARY_RAW" => Token::Literal( + Lit::ByteStrRaw(keywords::Invalid.name(), 0), None), "QUESTION" => Token::Question, - "SHEBANG" => Token::Shebang(Name(0)), + "SHEBANG" => Token::Shebang(keywords::Invalid.name()), _ => continue, }; @@ -158,7 +160,7 @@ fn fix(mut lit: &str) -> ast::Name { let leading_hashes = count(lit); // +1/-1 to adjust for single quotes - parse::token::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1]) + Symbol::intern(&lit[leading_hashes + 1..lit.len() - leading_hashes - 1]) } /// Assuming a char/byte literal, strip the 'b' prefix and the single quotes. @@ -168,7 +170,7 @@ fn fixchar(mut lit: &str) -> ast::Name { lit = &lit[1..]; } - parse::token::intern(&lit[1..lit.len() - 1]) + Symbol::intern(&lit[1..lit.len() - 1]) } fn count(lit: &str) -> usize { @@ -196,7 +198,7 @@ fn parse_antlr_token(s: &str, tokens: &HashMap, surrogate_ let not_found = format!("didn't find token {:?} in the map", toknum); let proto_tok = tokens.get(toknum).expect(¬_found[..]); - let nm = parse::token::intern(content); + let nm = Symbol::intern(content); debug!("What we got: content (`{}`), proto: {:?}", content, proto_tok); diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 28f4dda1408..addb056f534 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -524,6 +524,9 @@ impl Iterator for Box { fn size_hint(&self) -> (usize, Option) { (**self).size_hint() } + fn nth(&mut self, n: usize) -> Option { + (**self).nth(n) + } } #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for Box { @@ -532,7 +535,14 @@ impl DoubleEndedIterator for Box { } } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Box {} +impl ExactSizeIterator for Box { + fn len(&self) -> usize { + (**self).len() + } + fn is_empty(&self) -> bool { + (**self).is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for Box {} diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 0d450184ed8..f9dfdc0e075 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -74,11 +74,13 @@ #![feature(allocator)] #![feature(box_syntax)] +#![feature(cfg_target_has_atomic)] #![feature(coerce_unsized)] #![feature(const_fn)] #![feature(core_intrinsics)] #![feature(custom_attribute)] #![feature(dropck_parametricity)] +#![cfg_attr(not(test), feature(exact_size_is_empty))] #![feature(fundamental)] #![feature(lang_items)] #![feature(needs_allocator)] @@ -121,6 +123,7 @@ mod boxed { } #[cfg(test)] mod boxed_test; +#[cfg(target_has_atomic = "ptr")] pub mod arc; pub mod rc; pub mod raw_vec; diff --git a/src/liballoc/oom.rs b/src/liballoc/oom.rs index d355d59185e..3640156fec2 100644 --- a/src/liballoc/oom.rs +++ b/src/liballoc/oom.rs @@ -8,12 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use core::sync::atomic::{AtomicPtr, Ordering}; -use core::mem; +#[cfg(target_has_atomic = "ptr")] +pub use self::imp::set_oom_handler; use core::intrinsics; -static OOM_HANDLER: AtomicPtr<()> = AtomicPtr::new(default_oom_handler as *mut ()); - fn default_oom_handler() -> ! { // The default handler can't do much more since we can't assume the presence // of libc or any way of printing an error message. @@ -26,17 +24,38 @@ fn default_oom_handler() -> ! { #[unstable(feature = "oom", reason = "not a scrutinized interface", issue = "27700")] pub fn oom() -> ! { - let value = OOM_HANDLER.load(Ordering::SeqCst); - let handler: fn() -> ! = unsafe { mem::transmute(value) }; - handler(); + self::imp::oom() } -/// Set a custom handler for out-of-memory conditions -/// -/// To avoid recursive OOM failures, it is critical that the OOM handler does -/// not allocate any memory itself. -#[unstable(feature = "oom", reason = "not a scrutinized interface", - issue = "27700")] -pub fn set_oom_handler(handler: fn() -> !) { - OOM_HANDLER.store(handler as *mut (), Ordering::SeqCst); +#[cfg(target_has_atomic = "ptr")] +mod imp { + use core::mem; + use core::sync::atomic::{AtomicPtr, Ordering}; + + static OOM_HANDLER: AtomicPtr<()> = AtomicPtr::new(super::default_oom_handler as *mut ()); + + #[inline(always)] + pub fn oom() -> ! { + let value = OOM_HANDLER.load(Ordering::SeqCst); + let handler: fn() -> ! = unsafe { mem::transmute(value) }; + handler(); + } + + /// Set a custom handler for out-of-memory conditions + /// + /// To avoid recursive OOM failures, it is critical that the OOM handler does + /// not allocate any memory itself. + #[unstable(feature = "oom", reason = "not a scrutinized interface", + issue = "27700")] + pub fn set_oom_handler(handler: fn() -> !) { + OOM_HANDLER.store(handler as *mut (), Ordering::SeqCst); + } +} + +#[cfg(not(target_has_atomic = "ptr"))] +mod imp { + #[inline(always)] + pub fn oom() -> ! { + super::default_oom_handler() + } } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 8d863d7d9e9..d1e0e333b8f 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -12,35 +12,35 @@ //! Single-threaded reference-counting pointers. //! -//! The type [`Rc`][rc] provides shared ownership of a value of type `T`, -//! allocated in the heap. Invoking [`clone`][clone] on `Rc` produces a new -//! pointer to the same value in the heap. When the last `Rc` pointer to a +//! The type [`Rc`][`Rc`] provides shared ownership of a value of type `T`, +//! allocated in the heap. Invoking [`clone()`][clone] on [`Rc`] produces a new +//! pointer to the same value in the heap. When the last [`Rc`] pointer to a //! given value is destroyed, the pointed-to value is also destroyed. //! //! Shared references in Rust disallow mutation by default, and `Rc` is no -//! exception. If you need to mutate through an `Rc`, use [`Cell`][cell] or -//! [`RefCell`][refcell]. +//! exception. If you need to mutate through an [`Rc`], use [`Cell`] or +//! [`RefCell`]. //! -//! `Rc` uses non-atomic reference counting. This means that overhead is very -//! low, but an `Rc` cannot be sent between threads, and consequently `Rc` +//! [`Rc`] uses non-atomic reference counting. This means that overhead is very +//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`] //! does not implement [`Send`][send]. As a result, the Rust compiler -//! will check *at compile time* that you are not sending `Rc`s between +//! will check *at compile time* that you are not sending [`Rc`]s between //! threads. 
If you need multi-threaded, atomic reference counting, use //! [`sync::Arc`][arc]. //! -//! The [`downgrade`][downgrade] method can be used to create a non-owning -//! [`Weak`][weak] pointer. A `Weak` pointer can be [`upgrade`][upgrade]d -//! to an `Rc`, but this will return [`None`][option] if the value has +//! The [`downgrade()`][downgrade] method can be used to create a non-owning +//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d +//! to an [`Rc`], but this will return [`None`] if the value has //! already been dropped. //! -//! A cycle between `Rc` pointers will never be deallocated. For this reason, -//! `Weak` is used to break cycles. For example, a tree could have strong -//! `Rc` pointers from parent nodes to children, and `Weak` pointers from +//! A cycle between [`Rc`] pointers will never be deallocated. For this reason, +//! [`Weak`] is used to break cycles. For example, a tree could have strong +//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from //! children back to their parents. //! -//! `Rc` automatically dereferences to `T` (via the [`Deref`][deref] trait), -//! so you can call `T`'s methods on a value of type `Rc`. To avoid name -//! clashes with `T`'s methods, the methods of `Rc` itself are [associated +//! `Rc` automatically dereferences to `T` (via the [`Deref`] trait), +//! so you can call `T`'s methods on a value of type [`Rc`][`Rc`]. To avoid name +//! clashes with `T`'s methods, the methods of [`Rc`][`Rc`] itself are [associated //! functions][assoc], called using function-like syntax: //! //! ``` @@ -50,28 +50,15 @@ //! Rc::downgrade(&my_rc); //! ``` //! -//! `Weak` does not auto-dereference to `T`, because the value may have +//! [`Weak`][`Weak`] does not auto-dereference to `T`, because the value may have //! already been destroyed. //! -//! [rc]: struct.Rc.html -//! [weak]: struct.Weak.html -//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone -//! [cell]: ../../std/cell/struct.Cell.html -//! [refcell]: ../../std/cell/struct.RefCell.html -//! [send]: ../../std/marker/trait.Send.html -//! [arc]: ../../std/sync/struct.Arc.html -//! [deref]: ../../std/ops/trait.Deref.html -//! [downgrade]: struct.Rc.html#method.downgrade -//! [upgrade]: struct.Weak.html#method.upgrade -//! [option]: ../../std/option/enum.Option.html -//! [assoc]: ../../book/method-syntax.html#associated-functions -//! //! # Examples //! //! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`. //! We want to have our `Gadget`s point to their `Owner`. We can't do this with //! unique ownership, because more than one gadget may belong to the same -//! `Owner`. `Rc` allows us to share an `Owner` between multiple `Gadget`s, +//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s, //! and have the `Owner` remain allocated as long as any `Gadget` points at it. //! //! ``` @@ -127,20 +114,20 @@ //! ``` //! //! If our requirements change, and we also need to be able to traverse from -//! `Owner` to `Gadget`, we will run into problems. An `Rc` pointer from `Owner` +//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner` //! to `Gadget` introduces a cycle between the values. This means that their //! reference counts can never reach 0, and the values will remain allocated -//! forever: a memory leak. In order to get around this, we can use `Weak` +//! forever: a memory leak. In order to get around this, we can use [`Weak`] //! pointers. //! //! 
Rust actually makes it somewhat difficult to produce this loop in the first //! place. In order to end up with two values that point at each other, one of -//! them needs to be mutable. This is difficult because `Rc` enforces +//! them needs to be mutable. This is difficult because [`Rc`] enforces //! memory safety by only giving out shared references to the value it wraps, //! and these don't allow direct mutation. We need to wrap the part of the -//! value we wish to mutate in a [`RefCell`][refcell], which provides *interior +//! value we wish to mutate in a [`RefCell`], which provides *interior //! mutability*: a method to achieve mutability through a shared reference. -//! `RefCell` enforces Rust's borrowing rules at runtime. +//! [`RefCell`] enforces Rust's borrowing rules at runtime. //! //! ``` //! use std::rc::Rc; @@ -214,6 +201,19 @@ //! // Gadget Man, so he gets destroyed as well. //! } //! ``` +//! +//! [`Rc`]: struct.Rc.html +//! [`Weak`]: struct.Weak.html +//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone +//! [`Cell`]: ../../std/cell/struct.Cell.html +//! [`RefCell`]: ../../std/cell/struct.RefCell.html +//! [send]: ../../std/marker/trait.Send.html +//! [arc]: ../../std/sync/struct.Arc.html +//! [`Deref`]: ../../std/ops/trait.Deref.html +//! [downgrade]: struct.Rc.html#method.downgrade +//! [upgrade]: struct.Weak.html#method.upgrade +//! [`None`]: ../../std/option/enum.Option.html#variant.None +//! [assoc]: ../../book/method-syntax.html#associated-functions #![stable(feature = "rust1", since = "1.0.0")] @@ -251,9 +251,11 @@ struct RcBox { /// See the [module-level documentation](./index.html) for more details. /// /// The inherent methods of `Rc` are all associated functions, which means -/// that you have to call them as e.g. `Rc::get_mut(&value)` instead of -/// `value.get_mut()`. This avoids conflicts with methods of the inner +/// that you have to call them as e.g. [`Rc::get_mut(&value)`][get_mut] instead of +/// `value.get_mut()`. This avoids conflicts with methods of the inner /// type `T`. +/// +/// [get_mut]: #method.get_mut #[stable(feature = "rust1", since = "1.0.0")] pub struct Rc { ptr: Shared>, @@ -337,10 +339,10 @@ impl Rc { } /// Checks whether [`Rc::try_unwrap`][try_unwrap] would return - /// [`Ok`][result]. + /// [`Ok`]. /// /// [try_unwrap]: struct.Rc.html#method.try_unwrap - /// [result]: ../../std/result/enum.Result.html + /// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok /// /// # Examples /// @@ -543,14 +545,14 @@ impl Rc { /// Returns a mutable reference to the inner value, if there are /// no other `Rc` or [`Weak`][weak] pointers to the same value. /// - /// Returns [`None`][option] otherwise, because it is not safe to + /// Returns [`None`] otherwise, because it is not safe to /// mutate a shared value. /// /// See also [`make_mut`][make_mut], which will [`clone`][clone] /// the inner value when it's shared. 
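To make the `get_mut`/`make_mut` distinction in these doc comments concrete, a minimal usage sketch (illustrative only, not taken from the patch) might be:

```rust
use std::rc::Rc;

fn main() {
    let mut data = Rc::new(5);

    // With a single strong pointer, `get_mut` grants mutable access.
    *Rc::get_mut(&mut data).unwrap() += 1;
    assert_eq!(*data, 6);

    // Once the value is shared, `get_mut` returns `None`...
    let other = data.clone();
    assert!(Rc::get_mut(&mut data).is_none());

    // ...while `make_mut` clones the inner value, leaving `other` untouched.
    *Rc::make_mut(&mut data) += 10;
    assert_eq!(*data, 16);
    assert_eq!(*other, 6);
}
```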
/// /// [weak]: struct.Weak.html - /// [option]: ../../std/option/enum.Option.html + /// [`None`]: ../../std/option/enum.Option.html#variant.None /// [make_mut]: struct.Rc.html#method.make_mut /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone /// diff --git a/src/liballoc_jemalloc/build.rs b/src/liballoc_jemalloc/build.rs index 08a1f8ae8c6..fc849e7a50c 100644 --- a/src/liballoc_jemalloc/build.rs +++ b/src/liballoc_jemalloc/build.rs @@ -69,6 +69,7 @@ fn main() { .read_dir() .unwrap() .map(|e| e.unwrap()) + .filter(|e| &*e.file_name() != ".git") .collect::>(); while let Some(entry) = stack.pop() { let path = entry.path(); @@ -150,11 +151,17 @@ fn main() { cmd.arg(format!("--build={}", build_helper::gnu_target(&host))); run(&mut cmd); - run(Command::new("make") - .current_dir(&build_dir) - .arg("build_lib_static") - .arg("-j") - .arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set"))); + let mut make = Command::new("make"); + make.current_dir(&build_dir) + .arg("build_lib_static"); + + // mingw make seems... buggy? unclear... + if !host.contains("windows") { + make.arg("-j") + .arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set")); + } + + run(&mut make); if target.contains("windows") { println!("cargo:rustc-link-lib=static=jemalloc"); diff --git a/src/libcollections/Cargo.toml b/src/libcollections/Cargo.toml index 65d456e750f..ab882fde9c2 100644 --- a/src/libcollections/Cargo.toml +++ b/src/libcollections/Cargo.toml @@ -10,8 +10,12 @@ path = "lib.rs" [dependencies] alloc = { path = "../liballoc" } core = { path = "../libcore" } -rustc_unicode = { path = "../librustc_unicode" } +std_unicode = { path = "../libstd_unicode" } [[test]] name = "collectionstest" path = "../libcollectionstest/lib.rs" + +[[bench]] +name = "collectionstest" +path = "../libcollectionstest/lib.rs" diff --git a/src/libcollections/binary_heap.rs b/src/libcollections/binary_heap.rs index b4be8a43213..8d0c76c3646 100644 --- a/src/libcollections/binary_heap.rs +++ b/src/libcollections/binary_heap.rs @@ -986,7 +986,11 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> ExactSizeIterator for Iter<'a, T> {} +impl<'a, T> ExactSizeIterator for Iter<'a, T> { + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, T> FusedIterator for Iter<'a, T> {} @@ -1022,7 +1026,11 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter {} +impl ExactSizeIterator for IntoIter { + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for IntoIter {} @@ -1057,7 +1065,11 @@ impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { } #[stable(feature = "drain", since = "1.6.0")] -impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {} +impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> { + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, T: 'a> FusedIterator for Drain<'a, T> {} diff --git a/src/libcollections/enum_set.rs b/src/libcollections/enum_set.rs index 2d12b4ccffe..79e0021b148 100644 --- a/src/libcollections/enum_set.rs +++ b/src/libcollections/enum_set.rs @@ -16,7 +16,7 @@ #![unstable(feature = "enumset", reason = "matches collection reform specification, \ waiting for dust to settle", - issue = "0")] + issue = "37966")] use core::marker; use core::fmt; diff --git a/src/libcollections/lib.rs 
b/src/libcollections/lib.rs index 23d6edd6d79..68b067012d3 100644 --- a/src/libcollections/lib.rs +++ b/src/libcollections/lib.rs @@ -36,6 +36,7 @@ #![cfg_attr(not(test), feature(char_escape_debug))] #![feature(core_intrinsics)] #![feature(dropck_parametricity)] +#![feature(exact_size_is_empty)] #![feature(fmt_internals)] #![feature(fused)] #![feature(heap_api)] @@ -46,18 +47,19 @@ #![feature(placement_in)] #![feature(placement_new_protocol)] #![feature(shared)] +#![feature(slice_get_slice)] #![feature(slice_patterns)] #![feature(specialization)] #![feature(staged_api)] -#![feature(step_by)] #![feature(trusted_len)] #![feature(unicode)] #![feature(unique)] +#![feature(untagged_unions)] #![cfg_attr(test, feature(rand, test))] #![no_std] -extern crate rustc_unicode; +extern crate std_unicode; extern crate alloc; #[cfg(test)] diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 75796cf94bf..5fb8cd6e1e2 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -98,8 +98,7 @@ #![cfg_attr(test, allow(unused_imports, dead_code))] use alloc::boxed::Box; -use core::cmp::Ordering::{self, Greater, Less}; -use core::cmp; +use core::cmp::Ordering::{self, Greater}; use core::mem::size_of; use core::mem; use core::ptr; @@ -118,6 +117,8 @@ pub use core::slice::{SplitMut, ChunksMut, Split}; pub use core::slice::{SplitN, RSplitN, SplitNMut, RSplitNMut}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::slice::{from_raw_parts, from_raw_parts_mut}; +#[unstable(feature = "slice_get_slice", issue = "35729")] +pub use core::slice::SliceIndex; //////////////////////////////////////////////////////////////////////////////// // Basic slice extension methods @@ -353,7 +354,9 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn get(&self, index: usize) -> Option<&T> { + pub fn get(&self, index: I) -> Option<&I::Output> + where I: SliceIndex + { core_slice::SliceExt::get(self, index) } @@ -372,7 +375,9 @@ impl [T] { /// or `None` if the index is out of bounds #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { + pub fn get_mut(&mut self, index: I) -> Option<&mut I::Output> + where I: SliceIndex + { core_slice::SliceExt::get_mut(self, index) } @@ -390,7 +395,9 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub unsafe fn get_unchecked(&self, index: usize) -> &T { + pub unsafe fn get_unchecked(&self, index: I) -> &I::Output + where I: SliceIndex + { core_slice::SliceExt::get_unchecked(self, index) } @@ -410,7 +417,9 @@ impl [T] { /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] - pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T { + pub unsafe fn get_unchecked_mut(&mut self, index: I) -> &mut I::Output + where I: SliceIndex + { core_slice::SliceExt::get_unchecked_mut(self, index) } @@ -1032,8 +1041,8 @@ impl [T] { /// This is equivalent to `self.sort_by(|a, b| a.cmp(b))`. /// - /// This sort is stable and `O(n log n)` worst-case but allocates - /// approximately `2 * n` where `n` is the length of `self`. + /// This sort is stable and `O(n log n)` worst-case, but allocates + /// temporary storage half the size of `self`. /// /// # Examples /// @@ -1054,8 +1063,8 @@ impl [T] { /// Sorts the slice, in place, using `f` to extract a key by which to /// order the sort by. /// - /// This sort is stable and `O(n log n)` worst-case but allocates - /// approximately `2 * n`, where `n` is the length of `self`. 
+ /// This sort is stable and `O(n log n)` worst-case, but allocates + /// temporary storage half the size of `self`. /// /// # Examples /// @@ -1076,8 +1085,8 @@ impl [T] { /// Sorts the slice, in place, using `compare` to compare /// elements. /// - /// This sort is stable and `O(n log n)` worst-case but allocates - /// approximately `2 * n`, where `n` is the length of `self`. + /// This sort is stable and `O(n log n)` worst-case, but allocates + /// temporary storage half the size of `self`. /// /// # Examples /// @@ -1295,213 +1304,333 @@ impl ToOwned for [T] { // Sorting //////////////////////////////////////////////////////////////////////////////// -fn insertion_sort(v: &mut [T], mut compare: F) +/// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted. +/// +/// This is the integral subroutine of insertion sort. +fn insert_head(v: &mut [T], compare: &mut F) where F: FnMut(&T, &T) -> Ordering { - let len = v.len() as isize; - let buf_v = v.as_mut_ptr(); - - // 1 <= i < len; - for i in 1..len { - // j satisfies: 0 <= j <= i; - let mut j = i; + if v.len() >= 2 && compare(&v[0], &v[1]) == Greater { unsafe { - // `i` is in bounds. - let read_ptr = buf_v.offset(i) as *const T; + // There are three ways to implement insertion here: + // + // 1. Swap adjacent elements until the first one gets to its final destination. + // However, this way we copy data around more than is necessary. If elements are big + // structures (costly to copy), this method will be slow. + // + // 2. Iterate until the right place for the first element is found. Then shift the + // elements succeeding it to make room for it and finally place it into the + // remaining hole. This is a good method. + // + // 3. Copy the first element into a temporary variable. Iterate until the right place + // for it is found. As we go along, copy every traversed element into the slot + // preceding it. Finally, copy data from the temporary variable into the remaining + // hole. This method is very good. Benchmarks demonstrated slightly better + // performance than with the 2nd method. + // + // All methods were benchmarked, and the 3rd showed best results. So we chose that one. + let mut tmp = NoDrop { value: ptr::read(&v[0]) }; - // find where to insert, we need to do strict <, - // rather than <=, to maintain stability. + // Intermediate state of the insertion process is always tracked by `hole`, which + // serves two purposes: + // 1. Protects integrity of `v` from panics in `compare`. + // 2. Fills the remaining hole in `v` in the end. + // + // Panic safety: + // + // If `compare` panics at any point during the process, `hole` will get dropped and + // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it + // initially held exactly once. + let mut hole = InsertionHole { + src: &mut tmp.value, + dest: &mut v[1], + }; + ptr::copy_nonoverlapping(&v[1], &mut v[0], 1); - // 0 <= j - 1 < len, so .offset(j - 1) is in bounds. - while j > 0 && compare(&*read_ptr, &*buf_v.offset(j - 1)) == Less { - j -= 1; + for i in 2..v.len() { + if compare(&tmp.value, &v[i]) != Greater { + break; + } + ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1); + hole.dest = &mut v[i]; } + // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`. + } + } - // shift everything to the right, to make space to - // insert this value. + // Holds a value, but never drops it. 
+ #[allow(unions_with_drop_fields)] + union NoDrop { + value: T + } - // j + 1 could be `len` (for the last `i`), but in - // that case, `i == j` so we don't copy. The - // `.offset(j)` is always in bounds. + // When dropped, copies from `src` into `dest`. + struct InsertionHole { + src: *mut T, + dest: *mut T, + } - if i != j { - let tmp = ptr::read(read_ptr); - ptr::copy(&*buf_v.offset(j), buf_v.offset(j + 1), (i - j) as usize); - ptr::copy_nonoverlapping(&tmp, buf_v.offset(j), 1); - mem::forget(tmp); - } + impl Drop for InsertionHole { + fn drop(&mut self) { + unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); } } } } -fn merge_sort(v: &mut [T], mut compare: F) +/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and +/// stores the result into `v[..]`. +/// +/// # Safety +/// +/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough +/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type. +unsafe fn merge(v: &mut [T], mid: usize, buf: *mut T, compare: &mut F) where F: FnMut(&T, &T) -> Ordering { - // warning: this wildly uses unsafe. - const BASE_INSERTION: usize = 32; - const LARGE_INSERTION: usize = 16; - - // FIXME #12092: smaller insertion runs seems to make sorting - // vectors of large elements a little faster on some platforms, - // but hasn't been tested/tuned extensively - let insertion = if size_of::() <= 16 { - BASE_INSERTION - } else { - LARGE_INSERTION - }; - let len = v.len(); + let v = v.as_mut_ptr(); + let v_mid = v.offset(mid as isize); + let v_end = v.offset(len as isize); - // short vectors get sorted in-place via insertion sort to avoid allocations - if len <= insertion { - insertion_sort(v, compare); - return; - } + // The merge process first copies the shorter run into `buf`. Then it traces the newly copied + // run and the longer run forwards (or backwards), comparing their next unconsumed elements and + // copying the lesser (or greater) one into `v`. + // + // As soon as the shorter run is fully consumed, the process is done. If the longer run gets + // consumed first, then we must copy whatever is left of the shorter run into the remaining + // hole in `v`. + // + // Intermediate state of the process is always tracked by `hole`, which serves two purposes: + // 1. Protects integrity of `v` from panics in `compare`. + // 2. Fills the remaining hole in `v` if the longer run gets consumed first. + // + // Panic safety: + // + // If `compare` panics at any point during the process, `hole` will get dropped and fill the + // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every + // object it initially held exactly once. + let mut hole; - // allocate some memory to use as scratch memory, we keep the - // length 0 so we can keep shallow copies of the contents of `v` - // without risking the dtors running on an object twice if - // `compare` panics. - let mut working_space = Vec::with_capacity(2 * len); - // these both are buffers of length `len`. - let mut buf_dat = working_space.as_mut_ptr(); - let mut buf_tmp = unsafe { buf_dat.offset(len as isize) }; + if mid <= len - mid { + // The left run is shorter. + ptr::copy_nonoverlapping(v, buf, mid); + hole = MergeHole { + start: buf, + end: buf.offset(mid as isize), + dest: v, + }; - // length `len`. - let buf_v = v.as_ptr(); + // Initially, these pointers point to the beginnings of their arrays. 
+ let left = &mut hole.start; + let mut right = v_mid; + let out = &mut hole.dest; - // step 1. sort short runs with insertion sort. This takes the - // values from `v` and sorts them into `buf_dat`, leaving that - // with sorted runs of length INSERTION. + while *left < hole.end && right < v_end { + // Consume the lesser side. + // If equal, prefer the left run to maintain stability. + let to_copy = if compare(&**left, &*right) == Greater { + get_and_increment(&mut right) + } else { + get_and_increment(left) + }; + ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1); + } + } else { + // The right run is shorter. + ptr::copy_nonoverlapping(v_mid, buf, len - mid); + hole = MergeHole { + start: buf, + end: buf.offset((len - mid) as isize), + dest: v_mid, + }; - // We could hardcode the sorting comparisons here, and we could - // manipulate/step the pointers themselves, rather than repeatedly - // .offset-ing. - for start in (0..len).step_by(insertion) { - // start <= i < len; - for i in start..cmp::min(start + insertion, len) { - // j satisfies: start <= j <= i; - let mut j = i as isize; - unsafe { - // `i` is in bounds. - let read_ptr = buf_v.offset(i as isize); + // Initially, these pointers point past the ends of their arrays. + let left = &mut hole.dest; + let right = &mut hole.end; + let mut out = v_end; - // find where to insert, we need to do strict <, - // rather than <=, to maintain stability. - - // start <= j - 1 < len, so .offset(j - 1) is in - // bounds. - while j > start as isize && compare(&*read_ptr, &*buf_dat.offset(j - 1)) == Less { - j -= 1; - } - - // shift everything to the right, to make space to - // insert this value. - - // j + 1 could be `len` (for the last `i`), but in - // that case, `i == j` so we don't copy. The - // `.offset(j)` is always in bounds. - ptr::copy(&*buf_dat.offset(j), buf_dat.offset(j + 1), i - j as usize); - ptr::copy_nonoverlapping(read_ptr, buf_dat.offset(j), 1); - } + while v < *left && buf < *right { + // Consume the greater side. + // If equal, prefer the right run to maintain stability. + let to_copy = if compare(&*left.offset(-1), &*right.offset(-1)) == Greater { + decrement_and_get(left) + } else { + decrement_and_get(right) + }; + ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1); } } + // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of + // it will now be copied into the hole in `v`. - // step 2. merge the sorted runs. - let mut width = insertion; - while width < len { - // merge the sorted runs of length `width` in `buf_dat` two at - // a time, placing the result in `buf_tmp`. - - // 0 <= start <= len. - for start in (0..len).step_by(2 * width) { - // manipulate pointers directly for speed (rather than - // using a `for` loop with `range` and `.offset` inside - // that loop). - unsafe { - // the end of the first run & start of the - // second. Offset of `len` is defined, since this is - // precisely one byte past the end of the object. - let right_start = buf_dat.offset(cmp::min(start + width, len) as isize); - // end of the second. Similar reasoning to the above re safety. - let right_end_idx = cmp::min(start + 2 * width, len); - let right_end = buf_dat.offset(right_end_idx as isize); - - // the pointers to the elements under consideration - // from the two runs. - - // both of these are in bounds. 
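The comments above spell out the heart of the merge step: consume the lesser side, and prefer the left run on ties so the sort stays stable. A safe, allocating analogue of that logic (this `merge_runs` helper is a sketch for exposition, not code from the patch, and it sidesteps the in-place buffer and panic-safety machinery) could read:

```rust
/// Merges two already-sorted runs into a new vector, preferring the left
/// run on ties so that equal elements keep their original order.
fn merge_runs(left: &[i32], right: &[i32]) -> Vec<i32> {
    let mut out = Vec::with_capacity(left.len() + right.len());
    let (mut i, mut j) = (0, 0);
    while i < left.len() && j < right.len() {
        if right[j] < left[i] {
            out.push(right[j]);
            j += 1;
        } else {
            // `left[i] <= right[j]`: take the left element first (stability).
            out.push(left[i]);
            i += 1;
        }
    }
    // One of the runs is exhausted; append whatever remains of the other.
    out.extend_from_slice(&left[i..]);
    out.extend_from_slice(&right[j..]);
    out
}

fn main() {
    assert_eq!(merge_runs(&[1, 3, 5], &[2, 3, 4]), vec![1, 2, 3, 3, 4, 5]);
}
```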
- let mut left = buf_dat.offset(start as isize); - let mut right = right_start; - - // where we're putting the results, it is a run of - // length `2*width`, so we step it once for each step - // of either `left` or `right`. `buf_tmp` has length - // `len`, so these are in bounds. - let mut out = buf_tmp.offset(start as isize); - let out_end = buf_tmp.offset(right_end_idx as isize); - - // If left[last] <= right[0], they are already in order: - // fast-forward the left side (the right side is handled - // in the loop). - // If `right` is not empty then left is not empty, and - // the offsets are in bounds. - if right != right_end && compare(&*right.offset(-1), &*right) != Greater { - let elems = (right_start as usize - left as usize) / mem::size_of::(); - ptr::copy_nonoverlapping(&*left, out, elems); - out = out.offset(elems as isize); - left = right_start; - } - - while out < out_end { - // Either the left or the right run are exhausted, - // so just copy the remainder from the other run - // and move on; this gives a huge speed-up (order - // of 25%) for mostly sorted vectors (the best - // case). - if left == right_start { - // the number remaining in this run. - let elems = (right_end as usize - right as usize) / mem::size_of::(); - ptr::copy_nonoverlapping(&*right, out, elems); - break; - } else if right == right_end { - let elems = (right_start as usize - left as usize) / mem::size_of::(); - ptr::copy_nonoverlapping(&*left, out, elems); - break; - } - - // check which side is smaller, and that's the - // next element for the new run. - - // `left < right_start` and `right < right_end`, - // so these are valid. - let to_copy = if compare(&*left, &*right) == Greater { - step(&mut right) - } else { - step(&mut left) - }; - ptr::copy_nonoverlapping(&*to_copy, out, 1); - step(&mut out); - } - } - } - - mem::swap(&mut buf_dat, &mut buf_tmp); - - width *= 2; - } - - // write the result to `v` in one go, so that there are never two copies - // of the same object in `v`. - unsafe { - ptr::copy_nonoverlapping(&*buf_dat, v.as_mut_ptr(), len); - } - - // increment the pointer, returning the old pointer. - #[inline(always)] - unsafe fn step(ptr: &mut *mut T) -> *mut T { + unsafe fn get_and_increment(ptr: &mut *mut T) -> *mut T { let old = *ptr; *ptr = ptr.offset(1); old } + + unsafe fn decrement_and_get(ptr: &mut *mut T) -> *mut T { + *ptr = ptr.offset(-1); + *ptr + } + + // When dropped, copies the range `start..end` into `dest..`. + struct MergeHole { + start: *mut T, + end: *mut T, + dest: *mut T, + } + + impl Drop for MergeHole { + fn drop(&mut self) { + // `T` is not a zero-sized type, so it's okay to divide by it's size. + let len = (self.end as usize - self.start as usize) / mem::size_of::(); + unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); } + } + } +} + +/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail +/// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt). +/// +/// The algorithm identifies strictly descending and non-descending subsequences, which are called +/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed +/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are +/// satisfied, for every `i` in `0 .. runs.len() - 2`: +/// +/// 1. `runs[i].len > runs[i + 1].len` +/// 2. `runs[i].len > runs[i + 1].len + runs[i + 2].len` +/// +/// The invariants ensure that the total running time is `O(n log n)` worst-case. 
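Before the implementation that follows, the two invariants just listed can be sanity-checked in isolation; the `invariants_hold` helper and the sample run lengths below are made up for illustration and are not part of the patch:

```rust
/// Checks the two run-stack invariants quoted above for a hypothetical
/// stack of run lengths (the top of the stack is the end of the slice).
fn invariants_hold(runs: &[usize]) -> bool {
    // For every i in 0 .. runs.len() - 2:
    //   1. runs[i] > runs[i + 1]
    //   2. runs[i] > runs[i + 1] + runs[i + 2]
    (0..runs.len().saturating_sub(2)).all(|i| {
        runs[i] > runs[i + 1] && runs[i] > runs[i + 1] + runs[i + 2]
    })
}

fn main() {
    // Lengths that grow at least as fast as Fibonacci numbers pass both
    // checks, which keeps the stack of pending runs short (logarithmic in n).
    assert!(invariants_hold(&[30, 18, 10, 5, 3]));
    // Here 8 is not greater than 5 + 3, so invariant 2 is violated.
    assert!(!invariants_hold(&[8, 5, 3]));
}
```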
+fn merge_sort(v: &mut [T], mut compare: F) + where F: FnMut(&T, &T) -> Ordering +{ + // Sorting has no meaningful behavior on zero-sized types. + if size_of::() == 0 { + return; + } + + // FIXME #12092: These numbers are platform-specific and need more extensive testing/tuning. + // + // If `v` has length up to `insertion_len`, simply switch to insertion sort because it is going + // to perform better than merge sort. For bigger types `T`, the threshold is smaller. + // + // Short runs are extended using insertion sort to span at least `min_run` elements, in order + // to improve performance. + let (max_insertion, min_run) = if size_of::() <= 16 { + (64, 32) + } else { + (32, 16) + }; + + let len = v.len(); + + // Short arrays get sorted in-place via insertion sort to avoid allocations. + if len <= max_insertion { + if len >= 2 { + for i in (0..len-1).rev() { + insert_head(&mut v[i..], &mut compare); + } + } + return; + } + + // Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it + // shallow copies of the contents of `v` without risking the dtors running on copies if + // `compare` panics. When merging two sorted runs, this buffer holds a copy of the shorter run, + // which will always have length at most `len / 2`. + let mut buf = Vec::with_capacity(len / 2); + + // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a + // strange decision, but consider the fact that merges more often go in the opposite direction + // (forwards). According to benchmarks, merging forwards is slightly faster than merging + // backwards. To conclude, identifying runs by traversing backwards improves performance. + let mut runs = vec![]; + let mut end = len; + while end > 0 { + // Find the next natural run, and reverse it if it's strictly descending. + let mut start = end - 1; + if start > 0 { + start -= 1; + if compare(&v[start], &v[start + 1]) == Greater { + while start > 0 && compare(&v[start - 1], &v[start]) == Greater { + start -= 1; + } + v[start..end].reverse(); + } else { + while start > 0 && compare(&v[start - 1], &v[start]) != Greater { + start -= 1; + } + } + } + + // Insert some more elements into the run if it's too short. Insertion sort is faster than + // merge sort on short sequences, so this significantly improves performance. + while start > 0 && end - start < min_run { + start -= 1; + insert_head(&mut v[start..end], &mut compare); + } + + // Push this run onto the stack. + runs.push(Run { + start: start, + len: end - start, + }); + end = start; + + // Merge some pairs of adjacent runs to satisfy the invariants. + while let Some(r) = collapse(&runs) { + let left = runs[r + 1]; + let right = runs[r]; + unsafe { + merge(&mut v[left.start .. right.start + right.len], left.len, buf.as_mut_ptr(), + &mut compare); + } + runs[r] = Run { + start: left.start, + len: left.len + right.len, + }; + runs.remove(r + 1); + } + } + + // Finally, exactly one run must remain in the stack. + debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len); + + // Examines the stack of runs and identifies the next pair of runs to merge. More specifically, + // if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the + // algorithm should continue building a new run instead, `None` is returned. 
+ // + // TimSort is infamous for its buggy implementations, as described here: + // http://envisage-project.eu/timsort-specification-and-verification/ + // + // The gist of the story is: we must enforce the invariants on the top four runs on the stack. + // Enforcing them on just top three is not sufficient to ensure that the invariants will still + // hold for *all* runs in the stack. + // + // This function correctly checks invariants for the top four runs. Additionally, if the top + // run starts at index 0, it will always demand a merge operation until the stack is fully + // collapsed, in order to complete the sort. + #[inline] + fn collapse(runs: &[Run]) -> Option<usize> { + let n = runs.len(); + if n >= 2 && (runs[n - 1].start == 0 || + runs[n - 2].len <= runs[n - 1].len || + (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) || + (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) { + if n >= 3 && runs[n - 3].len < runs[n - 1].len { + Some(n - 3) + } else { + Some(n - 2) + } + } else { + None + } + } + + #[derive(Clone, Copy)] + struct Run { + start: usize, + len: usize, + } } diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 48a74bdecbb..d4be0914f15 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -24,12 +24,12 @@ use core::str::pattern::Pattern; use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher}; use core::mem; use core::iter::FusedIterator; -use rustc_unicode::str::{UnicodeStr, Utf16Encoder}; +use std_unicode::str::{UnicodeStr, Utf16Encoder}; use vec_deque::VecDeque; use borrow::{Borrow, ToOwned}; use string::String; -use rustc_unicode; +use std_unicode; use vec::Vec; use slice::SliceConcatExt; use boxed::Box; @@ -54,7 +54,7 @@ pub use core::str::{from_utf8, Chars, CharIndices, Bytes}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::{from_utf8_unchecked, ParseBoolError}; #[stable(feature = "rust1", since = "1.0.0")] -pub use rustc_unicode::str::SplitWhitespace; +pub use std_unicode::str::SplitWhitespace; #[stable(feature = "rust1", since = "1.0.0")] pub use core::str::pattern; @@ -1705,7 +1705,7 @@ impl str { } fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool { - use rustc_unicode::derived_property::{Cased, Case_Ignorable}; + use std_unicode::derived_property::{Cased, Case_Ignorable}; match iter.skip_while(|&c| Case_Ignorable(c)).next() { Some(c) => Cased(c), None => false, diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 348eb6fb5ff..b4c41a99a6b 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -63,8 +63,8 @@ use core::mem; use core::ops::{self, Add, AddAssign, Index, IndexMut}; use core::ptr; use core::str::pattern::Pattern; -use rustc_unicode::char::{decode_utf16, REPLACEMENT_CHARACTER}; -use rustc_unicode::str as unicode_str; +use std_unicode::char::{decode_utf16, REPLACEMENT_CHARACTER}; +use std_unicode::str as unicode_str; use borrow::{Cow, ToOwned}; use range::RangeArgument; @@ -1129,8 +1129,6 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(&mut self, idx: usize, ch: char) { - let len = self.len(); - assert!(idx <= len); assert!(self.is_char_boundary(idx)); let mut bits = [0; 4]; let bits = ch.encode_utf8(&mut bits).as_bytes(); @@ -1184,7 +1182,6 @@ impl String { reason = "recent addition", issue = "35553")] pub fn insert_str(&mut self, idx: usize, string: &str) { - assert!(idx <= self.len()); assert!(self.is_char_boundary(idx)); unsafe { @@ -1260,6 +1257,38 @@ impl
String { self.len() == 0 } + /// Divide one string into two at an index. + /// + /// The argument, `mid`, should be a byte offset from the start of the string. It must also + /// be on the boundary of a UTF-8 code point. + /// + /// The two strings returned go from the start of the string to `mid`, and from `mid` to the end + /// of the string. + /// + /// # Panics + /// + /// Panics if `mid` is not on a `UTF-8` code point boundary, or if it is beyond the last + /// code point of the string. + /// + /// # Examples + /// + /// ``` + /// # #![feature(string_split_off)] + /// # fn main() { + /// let mut hello = String::from("Hello, World!"); + /// let world = hello.split_off(7); + /// assert_eq!(hello, "Hello, "); + /// assert_eq!(world, "World!"); + /// # } + /// ``` + #[inline] + #[unstable(feature = "string_split_off", issue = "38080")] + pub fn split_off(&mut self, mid: usize) -> String { + assert!(self.is_char_boundary(mid)); + let other = self.vec.split_off(mid); + unsafe { String::from_utf8_unchecked(other) } + } + /// Truncates this `String`, removing all contents. /// /// While this means the `String` will have a length of zero, it does not diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index 24f8e3a2d91..c9f9e513ef3 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -1244,7 +1244,7 @@ impl Vec { /// ``` #[stable(feature = "vec_extend_from_slice", since = "1.6.0")] pub fn extend_from_slice(&mut self, other: &[T]) { - self.extend(other.iter().cloned()) + self.spec_extend(other.iter()) } } @@ -1499,7 +1499,7 @@ impl ops::DerefMut for Vec { impl FromIterator for Vec { #[inline] fn from_iter>(iter: I) -> Vec { - >::from_iter(iter.into_iter()) + >::from_iter(iter.into_iter()) } } @@ -1572,12 +1572,12 @@ impl Extend for Vec { } // Specialization trait used for Vec::from_iter and Vec::extend -trait SpecExtend { +trait SpecExtend { fn from_iter(iter: I) -> Self; fn spec_extend(&mut self, iter: I); } -impl SpecExtend for Vec +impl SpecExtend for Vec where I: Iterator, { default fn from_iter(mut iterator: I) -> Self { @@ -1607,7 +1607,7 @@ impl SpecExtend for Vec } } -impl SpecExtend for Vec +impl SpecExtend for Vec where I: TrustedLen, { fn from_iter(iterator: I) -> Self { @@ -1642,6 +1642,33 @@ impl SpecExtend for Vec } } +impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec + where I: Iterator, + T: Clone, +{ + default fn from_iter(iterator: I) -> Self { + SpecExtend::from_iter(iterator.cloned()) + } + + default fn spec_extend(&mut self, iterator: I) { + self.spec_extend(iterator.cloned()) + } +} + +impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec + where T: Copy, +{ + fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) { + let slice = iterator.as_slice(); + self.reserve(slice.len()); + unsafe { + let len = self.len(); + self.set_len(len + slice.len()); + self.get_unchecked_mut(len..).copy_from_slice(slice); + } + } +} + impl Vec { fn extend_desugared>(&mut self, mut iterator: I) { // This is the case for a general iterator. 
@@ -1669,7 +1696,7 @@ impl Vec { #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Copy> Extend<&'a T> for Vec { fn extend>(&mut self, iter: I) { - self.extend(iter.into_iter().map(|&x| x)) + self.spec_extend(iter.into_iter()) } } @@ -1988,7 +2015,11 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter {} +impl ExactSizeIterator for IntoIter { + fn is_empty(&self) -> bool { + self.ptr == self.end + } +} #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for IntoIter {} @@ -2082,7 +2113,11 @@ impl<'a, T> Drop for Drain<'a, T> { #[stable(feature = "drain", since = "1.6.0")] -impl<'a, T> ExactSizeIterator for Drain<'a, T> {} +impl<'a, T> ExactSizeIterator for Drain<'a, T> { + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, T> FusedIterator for Drain<'a, T> {} diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs index 5397193cab4..dbe3fec205c 100644 --- a/src/libcollections/vec_deque.rs +++ b/src/libcollections/vec_deque.rs @@ -810,7 +810,7 @@ impl VecDeque { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn is_empty(&self) -> bool { - self.len() == 0 + self.tail == self.head } /// Create a draining iterator that removes the specified range in the @@ -1916,7 +1916,11 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> ExactSizeIterator for Iter<'a, T> {} +impl<'a, T> ExactSizeIterator for Iter<'a, T> { + fn is_empty(&self) -> bool { + self.head == self.tail + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, T> FusedIterator for Iter<'a, T> {} @@ -1980,7 +1984,11 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> ExactSizeIterator for IterMut<'a, T> {} +impl<'a, T> ExactSizeIterator for IterMut<'a, T> { + fn is_empty(&self) -> bool { + self.head == self.tail + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, T> FusedIterator for IterMut<'a, T> {} @@ -2017,7 +2025,11 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter {} +impl ExactSizeIterator for IntoIter { + fn is_empty(&self) -> bool { + self.inner.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for IntoIter {} diff --git a/src/libcollectionstest/lib.rs b/src/libcollectionstest/lib.rs index 14ec8d58bef..0fe0a1bad64 100644 --- a/src/libcollectionstest/lib.rs +++ b/src/libcollectionstest/lib.rs @@ -18,12 +18,14 @@ #![feature(const_fn)] #![feature(dedup_by)] #![feature(enumset)] +#![feature(exact_size_is_empty)] #![feature(pattern)] #![feature(rand)] #![feature(repeat_str)] #![feature(step_by)] #![feature(str_escape)] #![feature(str_replacen)] +#![feature(string_split_off)] #![feature(test)] #![feature(unboxed_closures)] #![feature(unicode)] @@ -31,7 +33,7 @@ extern crate collections; extern crate test; -extern crate rustc_unicode; +extern crate std_unicode; use std::hash::{Hash, Hasher}; use std::collections::hash_map::DefaultHasher; diff --git a/src/libcollectionstest/slice.rs b/src/libcollectionstest/slice.rs index a6230ef471c..1b52214dee6 100644 --- a/src/libcollectionstest/slice.rs +++ b/src/libcollectionstest/slice.rs @@ -383,7 +383,7 @@ fn test_reverse() { #[test] fn test_sort() { - for len in 4..25 { + for len in (2..25).chain(500..510) { for _ in 0..100 { let mut 
v: Vec<_> = thread_rng().gen_iter::().take(len).collect(); let mut v1 = v.clone(); @@ -410,7 +410,7 @@ fn test_sort() { #[test] fn test_sort_stability() { - for len in 4..25 { + for len in (2..25).chain(500..510) { for _ in 0..10 { let mut counts = [0; 10]; @@ -441,6 +441,13 @@ fn test_sort_stability() { } } +#[test] +fn test_sort_zero_sized_type() { + // Should not panic. + [(); 10].sort(); + [(); 100].sort(); +} + #[test] fn test_concat() { let v: [Vec; 0] = []; @@ -633,6 +640,16 @@ fn test_iter_clone() { assert_eq!(it.next(), jt.next()); } +#[test] +fn test_iter_is_empty() { + let xs = [1, 2, 5, 10, 11]; + for i in 0..xs.len() { + for j in i..xs.len() { + assert_eq!(xs[i..j].iter().is_empty(), xs[i..j].is_empty()); + } + } +} + #[test] fn test_mut_iterator() { let mut xs = [1, 2, 3, 4, 5]; @@ -1328,89 +1345,104 @@ mod bench { }) } - #[bench] - fn sort_random_small(b: &mut Bencher) { + fn gen_ascending(len: usize) -> Vec { + (0..len as u64).collect() + } + + fn gen_descending(len: usize) -> Vec { + (0..len as u64).rev().collect() + } + + fn gen_random(len: usize) -> Vec { let mut rng = thread_rng(); - b.iter(|| { - let mut v: Vec<_> = rng.gen_iter::().take(5).collect(); - v.sort(); - }); - b.bytes = 5 * mem::size_of::() as u64; + rng.gen_iter::().take(len).collect() } - #[bench] - fn sort_random_medium(b: &mut Bencher) { + fn gen_mostly_ascending(len: usize) -> Vec { let mut rng = thread_rng(); - b.iter(|| { - let mut v: Vec<_> = rng.gen_iter::().take(100).collect(); - v.sort(); - }); - b.bytes = 100 * mem::size_of::() as u64; + let mut v = gen_ascending(len); + for _ in (0usize..).take_while(|x| x * x <= len) { + let x = rng.gen::() % len; + let y = rng.gen::() % len; + v.swap(x, y); + } + v } - #[bench] - fn sort_random_large(b: &mut Bencher) { + fn gen_mostly_descending(len: usize) -> Vec { let mut rng = thread_rng(); - b.iter(|| { - let mut v: Vec<_> = rng.gen_iter::().take(10000).collect(); - v.sort(); - }); - b.bytes = 10000 * mem::size_of::() as u64; + let mut v = gen_descending(len); + for _ in (0usize..).take_while(|x| x * x <= len) { + let x = rng.gen::() % len; + let y = rng.gen::() % len; + v.swap(x, y); + } + v } - #[bench] - fn sort_sorted(b: &mut Bencher) { - let mut v: Vec<_> = (0..10000).collect(); - b.iter(|| { - v.sort(); - }); - b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64; - } - - type BigSortable = (u64, u64, u64, u64); - - #[bench] - fn sort_big_random_small(b: &mut Bencher) { + fn gen_big_random(len: usize) -> Vec<[u64; 16]> { let mut rng = thread_rng(); - b.iter(|| { - let mut v = rng.gen_iter::() - .take(5) - .collect::>(); - v.sort(); - }); - b.bytes = 5 * mem::size_of::() as u64; + rng.gen_iter().map(|x| [x; 16]).take(len).collect() } - #[bench] - fn sort_big_random_medium(b: &mut Bencher) { - let mut rng = thread_rng(); - b.iter(|| { - let mut v = rng.gen_iter::() - .take(100) - .collect::>(); - v.sort(); - }); - b.bytes = 100 * mem::size_of::() as u64; + fn gen_big_ascending(len: usize) -> Vec<[u64; 16]> { + (0..len as u64).map(|x| [x; 16]).take(len).collect() } - #[bench] - fn sort_big_random_large(b: &mut Bencher) { - let mut rng = thread_rng(); - b.iter(|| { - let mut v = rng.gen_iter::() - .take(10000) - .collect::>(); - v.sort(); - }); - b.bytes = 10000 * mem::size_of::() as u64; + fn gen_big_descending(len: usize) -> Vec<[u64; 16]> { + (0..len as u64).rev().map(|x| [x; 16]).take(len).collect() } + macro_rules! 
sort_bench { + ($name:ident, $gen:expr, $len:expr) => { + #[bench] + fn $name(b: &mut Bencher) { + b.iter(|| $gen($len).sort()); + b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64; + } + } + } + + sort_bench!(sort_small_random, gen_random, 10); + sort_bench!(sort_small_ascending, gen_ascending, 10); + sort_bench!(sort_small_descending, gen_descending, 10); + + sort_bench!(sort_small_big_random, gen_big_random, 10); + sort_bench!(sort_small_big_ascending, gen_big_ascending, 10); + sort_bench!(sort_small_big_descending, gen_big_descending, 10); + + sort_bench!(sort_medium_random, gen_random, 100); + sort_bench!(sort_medium_ascending, gen_ascending, 100); + sort_bench!(sort_medium_descending, gen_descending, 100); + + sort_bench!(sort_large_random, gen_random, 10000); + sort_bench!(sort_large_ascending, gen_ascending, 10000); + sort_bench!(sort_large_descending, gen_descending, 10000); + sort_bench!(sort_large_mostly_ascending, gen_mostly_ascending, 10000); + sort_bench!(sort_large_mostly_descending, gen_mostly_descending, 10000); + + sort_bench!(sort_large_big_random, gen_big_random, 10000); + sort_bench!(sort_large_big_ascending, gen_big_ascending, 10000); + sort_bench!(sort_large_big_descending, gen_big_descending, 10000); + #[bench] - fn sort_big_sorted(b: &mut Bencher) { - let mut v: Vec = (0..10000).map(|i| (i, i, i, i)).collect(); + fn sort_large_random_expensive(b: &mut Bencher) { + let len = 10000; b.iter(|| { - v.sort(); + let mut count = 0; + let cmp = move |a: &u64, b: &u64| { + count += 1; + if count % 1_000_000_000 == 0 { + panic!("should not happen"); + } + (*a as f64).cos().partial_cmp(&(*b as f64).cos()).unwrap() + }; + + let mut v = gen_random(len); + v.sort_by(cmp); + + black_box(count); }); - b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64; + b.bytes = len as u64 * mem::size_of::() as u64; } } diff --git a/src/libcollectionstest/str.rs b/src/libcollectionstest/str.rs index 14a0819d381..384579ce6b8 100644 --- a/src/libcollectionstest/str.rs +++ b/src/libcollectionstest/str.rs @@ -530,7 +530,7 @@ fn from_utf8_mostly_ascii() { #[test] fn test_is_utf16() { - use rustc_unicode::str::is_utf16; + use std_unicode::str::is_utf16; macro_rules! 
pos { ($($e:expr),*) => { { $(assert!(is_utf16($e));)* } } @@ -1186,7 +1186,7 @@ fn test_rev_split_char_iterator_no_trailing() { #[test] fn test_utf16_code_units() { - use rustc_unicode::str::Utf16Encoder; + use std_unicode::str::Utf16Encoder; assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::>(), [0xE9, 0xD83D, 0xDCA9]) } diff --git a/src/libcollectionstest/string.rs b/src/libcollectionstest/string.rs index 98de33bdaa8..a7d85d0bea1 100644 --- a/src/libcollectionstest/string.rs +++ b/src/libcollectionstest/string.rs @@ -132,7 +132,7 @@ fn test_from_utf16() { let s_as_utf16 = s.encode_utf16().collect::>(); let u_as_string = String::from_utf16(&u).unwrap(); - assert!(::rustc_unicode::str::is_utf16(&u)); + assert!(::std_unicode::str::is_utf16(&u)); assert_eq!(s_as_utf16, u); assert_eq!(u_as_string, s); @@ -231,6 +231,45 @@ fn test_pop() { assert_eq!(data, "ประเทศไทย中"); } +#[test] +fn test_split_off_empty() { + let orig = "Hello, world!"; + let mut split = String::from(orig); + let empty: String = split.split_off(orig.len()); + assert!(empty.is_empty()); +} + +#[test] +#[should_panic] +fn test_split_off_past_end() { + let orig = "Hello, world!"; + let mut split = String::from(orig); + split.split_off(orig.len() + 1); +} + +#[test] +#[should_panic] +fn test_split_off_mid_char() { + let mut orig = String::from("山"); + orig.split_off(1); +} + +#[test] +fn test_split_off_ascii() { + let mut ab = String::from("ABCD"); + let cd = ab.split_off(2); + assert_eq!(ab, "AB"); + assert_eq!(cd, "CD"); +} + +#[test] +fn test_split_off_unicode() { + let mut nihon = String::from("日本語"); + let go = nihon.split_off("日本".len()); + assert_eq!(nihon, "日本"); + assert_eq!(go, "語"); +} + #[test] fn test_str_truncate() { let mut s = String::from("12345"); diff --git a/src/libcollectionstest/vec_deque.rs b/src/libcollectionstest/vec_deque.rs index f1ea85a6c5b..cdf022e4f02 100644 --- a/src/libcollectionstest/vec_deque.rs +++ b/src/libcollectionstest/vec_deque.rs @@ -1007,3 +1007,24 @@ fn assert_covariance() { d } } + +#[test] +fn test_is_empty() { + let mut v = VecDeque::::new(); + assert!(v.is_empty()); + assert!(v.iter().is_empty()); + assert!(v.iter_mut().is_empty()); + v.extend(&[2, 3, 4]); + assert!(!v.is_empty()); + assert!(!v.iter().is_empty()); + assert!(!v.iter_mut().is_empty()); + while let Some(_) = v.pop_front() { + assert_eq!(v.is_empty(), v.len() == 0); + assert_eq!(v.iter().is_empty(), v.iter().len() == 0); + assert_eq!(v.iter_mut().is_empty(), v.iter_mut().len() == 0); + } + assert!(v.is_empty()); + assert!(v.iter().is_empty()); + assert!(v.iter_mut().is_empty()); + assert!(v.into_iter().is_empty()); +} diff --git a/src/libcompiler_builtins/Cargo.toml b/src/libcompiler_builtins/Cargo.toml index 9e91e390a57..79570dc0252 100644 --- a/src/libcompiler_builtins/Cargo.toml +++ b/src/libcompiler_builtins/Cargo.toml @@ -8,6 +8,7 @@ version = "0.0.0" name = "compiler_builtins" path = "lib.rs" test = false +bench = false [dependencies] core = { path = "../libcore" } diff --git a/src/libcompiler_builtins/build.rs b/src/libcompiler_builtins/build.rs index acbd39bb163..f61e2281a5c 100644 --- a/src/libcompiler_builtins/build.rs +++ b/src/libcompiler_builtins/build.rs @@ -94,6 +94,7 @@ fn main() { cfg.flag("-fvisibility=hidden"); cfg.flag("-fomit-frame-pointer"); cfg.flag("-ffreestanding"); + cfg.define("VISIBILITY_HIDDEN", None); } let mut sources = Sources::new(); diff --git a/src/libcore/Cargo.toml b/src/libcore/Cargo.toml index 3b406ac0447..a72c712ad17 100644 --- a/src/libcore/Cargo.toml +++ 
b/src/libcore/Cargo.toml @@ -7,7 +7,12 @@ version = "0.0.0" name = "core" path = "lib.rs" test = false +bench = false [[test]] name = "coretest" path = "../libcoretest/lib.rs" + +[[bench]] +name = "coretest" +path = "../libcoretest/lib.rs" diff --git a/src/libcore/char.rs b/src/libcore/char.rs index 26d28049a47..7f3ac13bac1 100644 --- a/src/libcore/char.rs +++ b/src/libcore/char.rs @@ -10,7 +10,7 @@ //! Character manipulation. //! -//! For more details, see ::rustc_unicode::char (a.k.a. std::char) +//! For more details, see ::std_unicode::char (a.k.a. std::char) #![allow(non_snake_case)] #![stable(feature = "core_char", since = "1.2.0")] @@ -238,7 +238,7 @@ impl fmt::Display for CharTryFromError { /// A 'radix' here is sometimes also called a 'base'. A radix of two /// indicates a binary number, a radix of ten, decimal, and a radix of /// sixteen, hexadecimal, to give some common values. Arbitrary -/// radicum are supported. +/// radices are supported. /// /// `from_digit()` will return `None` if the input is not a digit in /// the given radix. diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 2d75a8ec420..9167264ba9d 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -166,7 +166,9 @@ pub struct Formatter<'a> { // NB. Argument is essentially an optimized partially applied formatting function, // equivalent to `exists T.(&T, fn(&T, &mut Formatter) -> Result`. -enum Void {} +struct Void { + _priv: (), +} /// This struct represents the generic "argument" which is taken by the Xprintf /// family of functions. It contains a function to format the given value. At diff --git a/src/libcore/iter/iterator.rs b/src/libcore/iter/iterator.rs index f6b74a91c19..48808b601c1 100644 --- a/src/libcore/iter/iterator.rs +++ b/src/libcore/iter/iterator.rs @@ -247,7 +247,7 @@ pub trait Iterator { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - fn nth(&mut self, mut n: usize) -> Option where Self: Sized { + fn nth(&mut self, mut n: usize) -> Option { for x in self { if n == 0 { return Some(x) } n -= 1; @@ -2179,4 +2179,7 @@ impl<'a, I: Iterator + ?Sized> Iterator for &'a mut I { type Item = I::Item; fn next(&mut self) -> Option { (**self).next() } fn size_hint(&self) -> (usize, Option) { (**self).size_hint() } + fn nth(&mut self, n: usize) -> Option { + (**self).nth(n) + } } diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index 02a2e370547..3999db0d63c 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -225,12 +225,12 @@ //! often called 'iterator adapters', as they're a form of the 'adapter //! pattern'. //! -//! Common iterator adapters include [`map()`], [`take()`], and [`collect()`]. +//! Common iterator adapters include [`map()`], [`take()`], and [`filter()`]. //! For more, see their documentation. //! //! [`map()`]: trait.Iterator.html#method.map //! [`take()`]: trait.Iterator.html#method.take -//! [`collect()`]: trait.Iterator.html#method.collect +//! [`filter()`]: trait.Iterator.html#method.filter //! //! # Laziness //! @@ -268,7 +268,7 @@ //! [`map()`]: trait.Iterator.html#method.map //! //! The two most common ways to evaluate an iterator are to use a `for` loop -//! like this, or using the [`collect()`] adapter to produce a new collection. +//! like this, or using the [`collect()`] method to produce a new collection. //! //! [`collect()`]: trait.Iterator.html#method.collect //! 
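Since the surrounding doc tweaks are about adapters versus consumers, here is a tiny example of the laziness being described (example mine, not from the patch):

```rust
fn main() {
    // Adapters such as `map`, `take`, and `filter` are lazy: building this
    // chain performs no work on its own.
    let evens_doubled = (1..11).filter(|x| x % 2 == 0).map(|x| x * 2);

    // Work happens only when something consumes the iterator, for example a
    // `for` loop or `collect()`.
    let v: Vec<i32> = evens_doubled.collect();
    assert_eq!(v, [4, 8, 12, 16, 20]);
}
```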
@@ -368,7 +368,16 @@ impl DoubleEndedIterator for Rev where I: DoubleEndedIterator { #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Rev - where I: ExactSizeIterator + DoubleEndedIterator {} + where I: ExactSizeIterator + DoubleEndedIterator +{ + fn len(&self) -> usize { + self.iter.len() + } + + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for Rev @@ -425,7 +434,15 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Cloned #[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> ExactSizeIterator for Cloned where I: ExactSizeIterator, T: Clone -{} +{ + fn len(&self) -> usize { + self.it.len() + } + + fn is_empty(&self) -> bool { + self.it.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, I, T: 'a> FusedIterator for Cloned @@ -920,7 +937,7 @@ unsafe impl TrustedLen for Zip /// you can also [`map()`] backwards: /// /// ```rust -/// let v: Vec = vec![1, 2, 3].into_iter().rev().map(|x| x + 1).collect(); +/// let v: Vec = vec![1, 2, 3].into_iter().map(|x| x + 1).rev().collect(); /// /// assert_eq!(v, [4, 3, 2]); /// ``` @@ -1007,7 +1024,16 @@ impl DoubleEndedIterator for Map where #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Map - where F: FnMut(I::Item) -> B {} + where F: FnMut(I::Item) -> B +{ + fn len(&self) -> usize { + self.iter.len() + } + + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for Map @@ -1236,7 +1262,15 @@ impl DoubleEndedIterator for Enumerate where } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Enumerate where I: ExactSizeIterator {} +impl ExactSizeIterator for Enumerate where I: ExactSizeIterator { + fn len(&self) -> usize { + self.iter.len() + } + + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[doc(hidden)] unsafe impl TrustedRandomAccess for Enumerate @@ -1945,7 +1979,15 @@ impl DoubleEndedIterator for Fuse #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Fuse where I: ExactSizeIterator {} +impl ExactSizeIterator for Fuse where I: ExactSizeIterator { + fn len(&self) -> usize { + self.iter.len() + } + + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} /// An iterator that calls a function with a reference to each element before /// yielding it. @@ -2012,7 +2054,16 @@ impl DoubleEndedIterator for Inspect #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for Inspect - where F: FnMut(&I::Item) {} + where F: FnMut(&I::Item) +{ + fn len(&self) -> usize { + self.iter.len() + } + + fn is_empty(&self) -> bool { + self.iter.is_empty() + } +} #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for Inspect diff --git a/src/libcore/iter/traits.rs b/src/libcore/iter/traits.rs index e94582cda7c..c5465549adf 100644 --- a/src/libcore/iter/traits.rs +++ b/src/libcore/iter/traits.rs @@ -552,7 +552,14 @@ pub trait ExactSizeIterator: Iterator { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {} +impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I { + fn len(&self) -> usize { + (**self).len() + } + fn is_empty(&self) -> bool { + (**self).is_empty() + } +} /// Trait to represent types that can be created by summing up an iterator. 
/// diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index 07f5e725e20..9834fca5fdc 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -89,7 +89,6 @@ #![feature(specialization)] #![feature(staged_api)] #![feature(unboxed_closures)] -#![cfg_attr(stage0, feature(question_mark))] #![feature(never_type)] #![feature(prelude_import)] diff --git a/src/libcore/option.rs b/src/libcore/option.rs index 607e16887a8..8871e1fa840 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -659,6 +659,16 @@ impl Option { impl<'a, T: Clone> Option<&'a T> { /// Maps an `Option<&T>` to an `Option` by cloning the contents of the /// option. + /// + /// # Examples + /// + /// ``` + /// let x = 12; + /// let opt_x = Some(&x); + /// assert_eq!(opt_x, Some(&12)); + /// let cloned = opt_x.cloned(); + /// assert_eq!(cloned, Some(12)); + /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn cloned(self) -> Option { self.map(|t| t.clone()) diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index 871b63145ca..a4a90e7a9da 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -38,10 +38,14 @@ use cmp; use fmt; use intrinsics::assume; use iter::*; -use ops::{self, RangeFull}; +use ops::{FnMut, self}; +use option::Option; +use option::Option::{None, Some}; +use result::Result; +use result::Result::{Ok, Err}; use ptr; use mem; -use marker; +use marker::{Copy, Send, Sync, Sized, self}; use iter_private::TrustedRandomAccess; #[repr(C)] @@ -80,7 +84,8 @@ pub trait SliceExt { #[stable(feature = "core", since = "1.6.0")] fn chunks(&self, size: usize) -> Chunks; #[stable(feature = "core", since = "1.6.0")] - fn get(&self, index: usize) -> Option<&Self::Item>; + fn get(&self, index: I) -> Option<&I::Output> + where I: SliceIndex; #[stable(feature = "core", since = "1.6.0")] fn first(&self) -> Option<&Self::Item>; #[stable(feature = "core", since = "1.6.0")] @@ -90,7 +95,8 @@ pub trait SliceExt { #[stable(feature = "core", since = "1.6.0")] fn last(&self) -> Option<&Self::Item>; #[stable(feature = "core", since = "1.6.0")] - unsafe fn get_unchecked(&self, index: usize) -> &Self::Item; + unsafe fn get_unchecked(&self, index: I) -> &I::Output + where I: SliceIndex; #[stable(feature = "core", since = "1.6.0")] fn as_ptr(&self) -> *const Self::Item; #[stable(feature = "core", since = "1.6.0")] @@ -108,7 +114,8 @@ pub trait SliceExt { #[stable(feature = "core", since = "1.6.0")] fn is_empty(&self) -> bool { self.len() == 0 } #[stable(feature = "core", since = "1.6.0")] - fn get_mut(&mut self, index: usize) -> Option<&mut Self::Item>; + fn get_mut(&mut self, index: I) -> Option<&mut I::Output> + where I: SliceIndex; #[stable(feature = "core", since = "1.6.0")] fn iter_mut(&mut self) -> IterMut; #[stable(feature = "core", since = "1.6.0")] @@ -137,7 +144,8 @@ pub trait SliceExt { #[stable(feature = "core", since = "1.6.0")] fn reverse(&mut self); #[stable(feature = "core", since = "1.6.0")] - unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut Self::Item; + unsafe fn get_unchecked_mut(&mut self, index: I) -> &mut I::Output + where I: SliceIndex; #[stable(feature = "core", since = "1.6.0")] fn as_mut_ptr(&mut self) -> *mut Self::Item; @@ -258,8 +266,10 @@ impl SliceExt for [T] { } #[inline] - fn get(&self, index: usize) -> Option<&T> { - if index < self.len() { Some(&self[index]) } else { None } + fn get(&self, index: I) -> Option<&I::Output> + where I: SliceIndex + { + index.get(self) } #[inline] @@ -284,8 +294,10 @@ impl SliceExt for [T] { } #[inline] - unsafe fn get_unchecked(&self, 
index: usize) -> &T { - &*(self.as_ptr().offset(index as isize)) + unsafe fn get_unchecked(&self, index: I) -> &I::Output + where I: SliceIndex + { + index.get_unchecked(self) } #[inline] @@ -323,8 +335,10 @@ impl SliceExt for [T] { } #[inline] - fn get_mut(&mut self, index: usize) -> Option<&mut T> { - if index < self.len() { Some(&mut self[index]) } else { None } + fn get_mut(&mut self, index: I) -> Option<&mut I::Output> + where I: SliceIndex + { + index.get_mut(self) } #[inline] @@ -451,8 +465,10 @@ impl SliceExt for [T] { } #[inline] - unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T { - &mut *self.as_mut_ptr().offset(index as isize) + unsafe fn get_unchecked_mut(&mut self, index: I) -> &mut I::Output + where I: SliceIndex + { + index.get_unchecked_mut(self) } #[inline] @@ -515,23 +531,26 @@ impl SliceExt for [T] { } #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::Index for [T] { - type Output = T; +#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"] +impl ops::Index for [T] + where I: SliceIndex +{ + type Output = I::Output; - fn index(&self, index: usize) -> &T { - // NB built-in indexing - &(*self)[index] + #[inline] + fn index(&self, index: I) -> &I::Output { + index.index(self) } } #[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::IndexMut for [T] { +#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"] +impl ops::IndexMut for [T] + where I: SliceIndex +{ #[inline] - fn index_mut(&mut self, index: usize) -> &mut T { - // NB built-in indexing - &mut (*self)[index] + fn index_mut(&mut self, index: I) -> &mut I::Output { + index.index_mut(self) } } @@ -547,205 +566,349 @@ fn slice_index_order_fail(index: usize, end: usize) -> ! { panic!("slice index starts at {} but ends at {}", index, end); } +/// A helper trait used for indexing operations. +#[unstable(feature = "slice_get_slice", issue = "35729")] +#[rustc_on_unimplemented = "slice indices are of type `usize` or ranges of `usize`"] +pub trait SliceIndex { + /// The output type returned by methods. + type Output: ?Sized; -/// Implements slicing with syntax `&self[begin .. end]`. -/// -/// Returns a slice of self for the index range [`begin`..`end`). -/// -/// This operation is `O(1)`. -/// -/// # Panics -/// -/// Requires that `begin <= end` and `end <= self.len()`, -/// otherwise slicing will panic. -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::Index> for [T] { + /// Returns a shared reference to the output at this location, if in + /// bounds. + fn get(self, slice: &[T]) -> Option<&Self::Output>; + + /// Returns a mutable reference to the output at this location, if in + /// bounds. + fn get_mut(self, slice: &mut [T]) -> Option<&mut Self::Output>; + + /// Returns a shared reference to the output at this location, without + /// performing any bounds checking. + unsafe fn get_unchecked(self, slice: &[T]) -> &Self::Output; + + /// Returns a mutable reference to the output at this location, without + /// performing any bounds checking. + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut Self::Output; + + /// Returns a shared reference to the output at this location, panicking + /// if out of bounds. 
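The same dispatch applies to the mutable accessors and to `Index`/`IndexMut`: the panicking `v[i]` / `v[a..b]` forms and the fallible `get_mut` now share one code path per index type. A small sketch of the mutable side, assuming the API in its eventual stable shape:

```rust
fn main() {
    let mut v = [1, 2, 3, 4];
    // Fallible mutable access through a range index.
    if let Some(s) = v.get_mut(1..3) {
        s[0] = 20;
    }
    // Panicking access goes through the same index machinery.
    v[3] = 40;
    assert_eq!(v, [1, 20, 3, 40]);
}
```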
+ fn index(self, slice: &[T]) -> &Self::Output; + + /// Returns a mutable reference to the output at this location, panicking + /// if out of bounds. + fn index_mut(self, slice: &mut [T]) -> &mut Self::Output; +} + +#[stable(feature = "slice-get-slice-impls", since = "1.13.0")] +impl SliceIndex for usize { + type Output = T; + + #[inline] + fn get(self, slice: &[T]) -> Option<&T> { + if self < slice.len() { + unsafe { + Some(self.get_unchecked(slice)) + } + } else { + None + } + } + + #[inline] + fn get_mut(self, slice: &mut [T]) -> Option<&mut T> { + if self < slice.len() { + unsafe { + Some(self.get_unchecked_mut(slice)) + } + } else { + None + } + } + + #[inline] + unsafe fn get_unchecked(self, slice: &[T]) -> &T { + &*slice.as_ptr().offset(self as isize) + } + + #[inline] + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut T { + &mut *slice.as_mut_ptr().offset(self as isize) + } + + #[inline] + fn index(self, slice: &[T]) -> &T { + // NB: use intrinsic indexing + &(*slice)[self] + } + + #[inline] + fn index_mut(self, slice: &mut [T]) -> &mut T { + // NB: use intrinsic indexing + &mut (*slice)[self] + } +} + +#[stable(feature = "slice-get-slice-impls", since = "1.13.0")] +impl SliceIndex for ops::Range { type Output = [T]; #[inline] - fn index(&self, index: ops::Range) -> &[T] { - if index.start > index.end { - slice_index_order_fail(index.start, index.end); - } else if index.end > self.len() { - slice_index_len_fail(index.end, self.len()); + fn get(self, slice: &[T]) -> Option<&[T]> { + if self.start > self.end || self.end > slice.len() { + None + } else { + unsafe { + Some(self.get_unchecked(slice)) + } + } + } + + #[inline] + fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { + if self.start > self.end || self.end > slice.len() { + None + } else { + unsafe { + Some(self.get_unchecked_mut(slice)) + } + } + } + + #[inline] + unsafe fn get_unchecked(self, slice: &[T]) -> &[T] { + from_raw_parts(slice.as_ptr().offset(self.start as isize), self.end - self.start) + } + + #[inline] + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] { + from_raw_parts_mut(slice.as_mut_ptr().offset(self.start as isize), self.end - self.start) + } + + #[inline] + fn index(self, slice: &[T]) -> &[T] { + if self.start > self.end { + slice_index_order_fail(self.start, self.end); + } else if self.end > slice.len() { + slice_index_len_fail(self.end, slice.len()); } unsafe { - from_raw_parts ( - self.as_ptr().offset(index.start as isize), - index.end - index.start - ) + self.get_unchecked(slice) + } + } + + #[inline] + fn index_mut(self, slice: &mut [T]) -> &mut [T] { + if self.start > self.end { + slice_index_order_fail(self.start, self.end); + } else if self.end > slice.len() { + slice_index_len_fail(self.end, slice.len()); + } + unsafe { + self.get_unchecked_mut(slice) } } } -/// Implements slicing with syntax `&self[.. end]`. -/// -/// Returns a slice of self from the beginning until but not including -/// the index `end`. -/// -/// Equivalent to `&self[0 .. end]` -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::Index> for [T] { +#[stable(feature = "slice-get-slice-impls", since = "1.13.0")] +impl SliceIndex for ops::RangeTo { type Output = [T]; #[inline] - fn index(&self, index: ops::RangeTo) -> &[T] { - self.index(0 .. 
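Because this one trait sits behind all of these methods, downstream code can be generic over "anything usable as a slice index". The sketch below uses the form of the trait that eventually stabilized (`std::slice::SliceIndex<[T]>`), which differs slightly from the unstable `slice_get_slice` version introduced in this diff:

```rust
use std::slice::SliceIndex;

// Works with a plain usize, a Range, a RangeTo, and so on.
fn try_index<T, I>(slice: &[T], index: I) -> Option<&I::Output>
where
    I: SliceIndex<[T]>,
{
    slice.get(index)
}

fn main() {
    let v = [1, 2, 3];
    assert_eq!(try_index(&v, 0), Some(&1));
    assert_eq!(try_index(&v, 1..3), Some(&[2, 3][..]));
    assert_eq!(try_index(&v, 5..6), None);
}
```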
index.end) + fn get(self, slice: &[T]) -> Option<&[T]> { + (0..self.end).get(slice) + } + + #[inline] + fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { + (0..self.end).get_mut(slice) + } + + #[inline] + unsafe fn get_unchecked(self, slice: &[T]) -> &[T] { + (0..self.end).get_unchecked(slice) + } + + #[inline] + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] { + (0..self.end).get_unchecked_mut(slice) + } + + #[inline] + fn index(self, slice: &[T]) -> &[T] { + (0..self.end).index(slice) + } + + #[inline] + fn index_mut(self, slice: &mut [T]) -> &mut [T] { + (0..self.end).index_mut(slice) } } -/// Implements slicing with syntax `&self[begin ..]`. -/// -/// Returns a slice of self from and including the index `begin` until the end. -/// -/// Equivalent to `&self[begin .. self.len()]` -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::Index> for [T] { +#[stable(feature = "slice-get-slice-impls", since = "1.13.0")] +impl SliceIndex for ops::RangeFrom { type Output = [T]; #[inline] - fn index(&self, index: ops::RangeFrom) -> &[T] { - self.index(index.start .. self.len()) + fn get(self, slice: &[T]) -> Option<&[T]> { + (self.start..slice.len()).get(slice) + } + + #[inline] + fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { + (self.start..slice.len()).get_mut(slice) + } + + #[inline] + unsafe fn get_unchecked(self, slice: &[T]) -> &[T] { + (self.start..slice.len()).get_unchecked(slice) + } + + #[inline] + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] { + (self.start..slice.len()).get_unchecked_mut(slice) + } + + #[inline] + fn index(self, slice: &[T]) -> &[T] { + (self.start..slice.len()).index(slice) + } + + #[inline] + fn index_mut(self, slice: &mut [T]) -> &mut [T] { + (self.start..slice.len()).index_mut(slice) } } -/// Implements slicing with syntax `&self[..]`. -/// -/// Returns a slice of the whole slice. This operation cannot panic. -/// -/// Equivalent to `&self[0 .. self.len()]` -#[stable(feature = "rust1", since = "1.0.0")] -impl ops::Index for [T] { +#[stable(feature = "slice-get-slice-impls", since = "1.13.0")] +impl SliceIndex for ops::RangeFull { type Output = [T]; #[inline] - fn index(&self, _index: RangeFull) -> &[T] { - self + fn get(self, slice: &[T]) -> Option<&[T]> { + Some(slice) + } + + #[inline] + fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { + Some(slice) + } + + #[inline] + unsafe fn get_unchecked(self, slice: &[T]) -> &[T] { + slice + } + + #[inline] + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] { + slice + } + + #[inline] + fn index(self, slice: &[T]) -> &[T] { + slice + } + + #[inline] + fn index_mut(self, slice: &mut [T]) -> &mut [T] { + slice } } -#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::Index> for [T] { + +#[stable(feature = "slice-get-slice-impls", since = "1.13.0")] +impl SliceIndex for ops::RangeInclusive { type Output = [T]; #[inline] - fn index(&self, index: ops::RangeInclusive) -> &[T] { - match index { + fn get(self, slice: &[T]) -> Option<&[T]> { + match self { + ops::RangeInclusive::Empty { .. } => Some(&[]), + ops::RangeInclusive::NonEmpty { end, .. 
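As the impls above show, the open-ended forms simply delegate to an equivalent `Range<usize>` (or hand back the whole slice for `..`), so every range flavor behaves consistently. For example, using today's `..=` syntax for the inclusive form that this diff still spells `...`:

```rust
fn main() {
    let v = [1, 2, 3, 4, 5];
    assert_eq!(&v[..], [1, 2, 3, 4, 5]);   // RangeFull: the whole slice
    assert_eq!(&v[..2], [1, 2]);           // RangeTo: delegates to 0..2
    assert_eq!(&v[2..], [3, 4, 5]);        // RangeFrom: delegates to 2..len
    assert_eq!(&v[1..=3], [2, 3, 4]);      // RangeInclusive: 1..4
}
```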
} if end == usize::max_value() => None, + ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get(slice), + } + } + + #[inline] + fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { + match self { + ops::RangeInclusive::Empty { .. } => Some(&mut []), + ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => None, + ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_mut(slice), + } + } + + #[inline] + unsafe fn get_unchecked(self, slice: &[T]) -> &[T] { + match self { ops::RangeInclusive::Empty { .. } => &[], - ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => - panic!("attempted to index slice up to maximum usize"), - ops::RangeInclusive::NonEmpty { start, end } => - self.index(start .. end+1) + ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).get_unchecked(slice), + } + } + + #[inline] + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] { + match self { + ops::RangeInclusive::Empty { .. } => &mut [], + ops::RangeInclusive::NonEmpty { start, end } => { + (start..end + 1).get_unchecked_mut(slice) + } + } + } + + #[inline] + fn index(self, slice: &[T]) -> &[T] { + match self { + ops::RangeInclusive::Empty { .. } => &[], + ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => { + panic!("attempted to index slice up to maximum usize"); + }, + ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index(slice), + } + } + + #[inline] + fn index_mut(self, slice: &mut [T]) -> &mut [T] { + match self { + ops::RangeInclusive::Empty { .. } => &mut [], + ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => { + panic!("attempted to index slice up to maximum usize"); + }, + ops::RangeInclusive::NonEmpty { start, end } => (start..end + 1).index_mut(slice), } } } -#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::Index> for [T] { + +#[stable(feature = "slice-get-slice-impls", since = "1.13.0")] +impl SliceIndex for ops::RangeToInclusive { type Output = [T]; #[inline] - fn index(&self, index: ops::RangeToInclusive) -> &[T] { - self.index(0...index.end) + fn get(self, slice: &[T]) -> Option<&[T]> { + (0...self.end).get(slice) } -} -/// Implements mutable slicing with syntax `&mut self[begin .. end]`. -/// -/// Returns a slice of self for the index range [`begin`..`end`). -/// -/// This operation is `O(1)`. -/// -/// # Panics -/// -/// Requires that `begin <= end` and `end <= self.len()`, -/// otherwise slicing will panic. -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::IndexMut> for [T] { #[inline] - fn index_mut(&mut self, index: ops::Range) -> &mut [T] { - if index.start > index.end { - slice_index_order_fail(index.start, index.end); - } else if index.end > self.len() { - slice_index_len_fail(index.end, self.len()); - } - unsafe { - from_raw_parts_mut( - self.as_mut_ptr().offset(index.start as isize), - index.end - index.start - ) - } + fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> { + (0...self.end).get_mut(slice) } -} -/// Implements mutable slicing with syntax `&mut self[.. end]`. -/// -/// Returns a slice of self from the beginning until but not including -/// the index `end`. -/// -/// Equivalent to `&mut self[0 .. 
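The inclusive-range impls carry one special case: a range ending at `usize::max_value()` cannot be rewritten as a half-open `start..end + 1`, so the fallible methods return `None` while the panicking ones abort with the quoted message. A sketch of the fallible behavior with modern syntax:

```rust
fn main() {
    let v = [1u8, 2, 3];
    assert_eq!(v.get(0..=2), Some(&v[..]));
    // `end + 1` would overflow usize, so this is reported as out of bounds.
    assert_eq!(v.get(0..=usize::MAX), None);
}
```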
end]` -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::IndexMut> for [T] { #[inline] - fn index_mut(&mut self, index: ops::RangeTo) -> &mut [T] { - self.index_mut(0 .. index.end) + unsafe fn get_unchecked(self, slice: &[T]) -> &[T] { + (0...self.end).get_unchecked(slice) } -} -/// Implements mutable slicing with syntax `&mut self[begin ..]`. -/// -/// Returns a slice of self from and including the index `begin` until the end. -/// -/// Equivalent to `&mut self[begin .. self.len()]` -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::IndexMut> for [T] { #[inline] - fn index_mut(&mut self, index: ops::RangeFrom) -> &mut [T] { - let len = self.len(); - self.index_mut(index.start .. len) + unsafe fn get_unchecked_mut(self, slice: &mut [T]) -> &mut [T] { + (0...self.end).get_unchecked_mut(slice) } -} -/// Implements mutable slicing with syntax `&mut self[..]`. -/// -/// Returns a slice of the whole slice. This operation can not panic. -/// -/// Equivalent to `&mut self[0 .. self.len()]` -#[stable(feature = "rust1", since = "1.0.0")] -impl ops::IndexMut for [T] { #[inline] - fn index_mut(&mut self, _index: RangeFull) -> &mut [T] { - self + fn index(self, slice: &[T]) -> &[T] { + (0...self.end).index(slice) } -} -#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::IndexMut> for [T] { #[inline] - fn index_mut(&mut self, index: ops::RangeInclusive) -> &mut [T] { - match index { - ops::RangeInclusive::Empty { .. } => &mut [], - ops::RangeInclusive::NonEmpty { end, .. } if end == usize::max_value() => - panic!("attempted to index slice up to maximum usize"), - ops::RangeInclusive::NonEmpty { start, end } => - self.index_mut(start .. 
end+1) - } - } -} -#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] -#[rustc_on_unimplemented = "slice indices are of type `usize`"] -impl ops::IndexMut> for [T] { - #[inline] - fn index_mut(&mut self, index: ops::RangeToInclusive) -> &mut [T] { - self.index_mut(0...index.end) + fn index_mut(self, slice: &mut [T]) -> &mut [T] { + (0...self.end).index_mut(slice) } } @@ -983,7 +1146,11 @@ impl<'a, T> Iter<'a, T> { iterator!{struct Iter -> *const T, &'a T} #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> ExactSizeIterator for Iter<'a, T> {} +impl<'a, T> ExactSizeIterator for Iter<'a, T> { + fn is_empty(&self) -> bool { + self.ptr == self.end + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, T> FusedIterator for Iter<'a, T> {} @@ -1107,7 +1274,11 @@ impl<'a, T> IterMut<'a, T> { iterator!{struct IterMut -> *mut T, &'a mut T} #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T> ExactSizeIterator for IterMut<'a, T> {} +impl<'a, T> ExactSizeIterator for IterMut<'a, T> { + fn is_empty(&self) -> bool { + self.ptr == self.end + } +} #[unstable(feature = "fused", issue = "35602")] impl<'a, T> FusedIterator for IterMut<'a, T> {} diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index b4cd52e59f6..de418b831cc 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -618,6 +618,11 @@ impl<'a> ExactSizeIterator for Bytes<'a> { fn len(&self) -> usize { self.0.len() } + + #[inline] + fn is_empty(&self) -> bool { + self.0.is_empty() + } } #[unstable(feature = "fused", issue = "35602")] diff --git a/src/libcoretest/lib.rs b/src/libcoretest/lib.rs index b8c01e570f5..92fb01e535c 100644 --- a/src/libcoretest/lib.rs +++ b/src/libcoretest/lib.rs @@ -40,7 +40,7 @@ extern crate core; extern crate test; extern crate libc; -extern crate rustc_unicode; +extern crate std_unicode; extern crate rand; mod any; diff --git a/src/libcoretest/slice.rs b/src/libcoretest/slice.rs index f82ab44adad..ad39e6b081b 100644 --- a/src/libcoretest/slice.rs +++ b/src/libcoretest/slice.rs @@ -180,3 +180,47 @@ fn test_windows_last() { let c2 = v2.windows(2); assert_eq!(c2.last().unwrap()[0], 3); } + +#[test] +fn get_range() { + let v: &[i32] = &[0, 1, 2, 3, 4, 5]; + assert_eq!(v.get(..), Some(&[0, 1, 2, 3, 4, 5][..])); + assert_eq!(v.get(..2), Some(&[0, 1][..])); + assert_eq!(v.get(2..), Some(&[2, 3, 4, 5][..])); + assert_eq!(v.get(1..4), Some(&[1, 2, 3][..])); + assert_eq!(v.get(7..), None); + assert_eq!(v.get(7..10), None); +} + +#[test] +fn get_mut_range() { + let mut v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; + assert_eq!(v.get_mut(..), Some(&mut [0, 1, 2, 3, 4, 5][..])); + assert_eq!(v.get_mut(..2), Some(&mut [0, 1][..])); + assert_eq!(v.get_mut(2..), Some(&mut [2, 3, 4, 5][..])); + assert_eq!(v.get_mut(1..4), Some(&mut [1, 2, 3][..])); + assert_eq!(v.get_mut(7..), None); + assert_eq!(v.get_mut(7..10), None); +} + +#[test] +fn get_unchecked_range() { + unsafe { + let v: &[i32] = &[0, 1, 2, 3, 4, 5]; + assert_eq!(v.get_unchecked(..), &[0, 1, 2, 3, 4, 5][..]); + assert_eq!(v.get_unchecked(..2), &[0, 1][..]); + assert_eq!(v.get_unchecked(2..), &[2, 3, 4, 5][..]); + assert_eq!(v.get_unchecked(1..4), &[1, 2, 3][..]); + } +} + +#[test] +fn get_unchecked_mut_range() { + unsafe { + let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5]; + assert_eq!(v.get_unchecked_mut(..), &mut [0, 1, 2, 3, 4, 5][..]); + assert_eq!(v.get_unchecked_mut(..2), &mut [0, 1][..]); + assert_eq!(v.get_unchecked_mut(2..), &mut[2, 3, 4, 5][..]); + 
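For the slice iterators, `is_empty` is overridden as a direct pointer comparison (`self.ptr == self.end`) instead of computing a length first. Since `ExactSizeIterator::is_empty` is still unstable (`exact_size_is_empty`), a stable sketch can only observe the same state through `len()`:

```rust
fn main() {
    let v = [1, 2, 3];
    let mut it = v.iter();
    assert_eq!(it.len(), 3);
    assert_eq!(it.nth(2), Some(&3)); // consume everything
    // Internally this check is now just `ptr == end`.
    assert_eq!(it.len(), 0);
}
```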
assert_eq!(v.get_unchecked_mut(1..4), &mut [1, 2, 3][..]); + } +} diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 03057af4a84..220051c9d35 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -295,7 +295,6 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(str_escape)] -#![cfg_attr(stage0, feature(question_mark))] use self::LabelText::*; diff --git a/src/liblibc b/src/liblibc index 6e8c1b490cc..0ac39c5ccf6 160000 --- a/src/liblibc +++ b/src/liblibc @@ -1 +1 @@ -Subproject commit 6e8c1b490ccbe5e84d248bab883515bc85394b5f +Subproject commit 0ac39c5ccf6a04395b7c40dd62321cb91f63f160 diff --git a/src/libpanic_abort/Cargo.toml b/src/libpanic_abort/Cargo.toml index 9d62be64fc4..d90d2864813 100644 --- a/src/libpanic_abort/Cargo.toml +++ b/src/libpanic_abort/Cargo.toml @@ -6,6 +6,7 @@ version = "0.0.0" [lib] path = "lib.rs" test = false +bench = false [dependencies] core = { path = "../libcore" } diff --git a/src/libpanic_unwind/Cargo.toml b/src/libpanic_unwind/Cargo.toml index 18f37a8bb17..90c16fff6f1 100644 --- a/src/libpanic_unwind/Cargo.toml +++ b/src/libpanic_unwind/Cargo.toml @@ -6,6 +6,7 @@ version = "0.0.0" [lib] path = "lib.rs" test = false +bench = false [dependencies] alloc = { path = "../liballoc" } diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index c399623462b..f21d98a0fc7 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -10,8 +10,6 @@ use rustc_data_structures::graph; use cfg::*; -use hir::def::Def; -use hir::pat_util; use ty::{self, TyCtxt}; use syntax::ast; use syntax::ptr::P; @@ -100,7 +98,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn pat(&mut self, pat: &hir::Pat, pred: CFGIndex) -> CFGIndex { match pat.node { PatKind::Binding(.., None) | - PatKind::Path(..) | + PatKind::Path(_) | PatKind::Lit(..) | PatKind::Range(..) | PatKind::Wild => { @@ -284,7 +282,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { hir::ExprBreak(label, ref opt_expr) => { let v = self.opt_expr(opt_expr, pred); - let loop_scope = self.find_scope(expr, label.map(|l| l.node)); + let loop_scope = self.find_scope(expr, label); let b = self.add_ast_node(expr.id, &[v]); self.add_exiting_edge(expr, b, loop_scope, loop_scope.break_index); @@ -292,7 +290,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } hir::ExprAgain(label) => { - let loop_scope = self.find_scope(expr, label.map(|l| l.node)); + let loop_scope = self.find_scope(expr, label); let a = self.add_ast_node(expr.id, &[pred]); self.add_exiting_edge(expr, a, loop_scope, loop_scope.continue_index); @@ -361,7 +359,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { hir::ExprClosure(..) | hir::ExprLit(..) | - hir::ExprPath(..) 
=> { + hir::ExprPath(_) => { self.straightline(expr, pred, None::.iter()) } } @@ -457,7 +455,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // Visit the guard expression let guard_exit = self.expr(&guard, guard_start); - let this_has_bindings = pat_util::pat_contains_bindings_or_wild(&pat); + let this_has_bindings = pat.contains_bindings_or_wild(); // If both this pattern and the previous pattern // were free of bindings, they must consist only @@ -570,23 +568,16 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn find_scope(&self, expr: &hir::Expr, - label: Option) -> LoopScope { - if label.is_none() { - return *self.loop_scopes.last().unwrap(); - } - - match self.tcx.expect_def(expr.id) { - Def::Label(loop_id) => { + label: Option) -> LoopScope { + match label { + None => *self.loop_scopes.last().unwrap(), + Some(label) => { for l in &self.loop_scopes { - if l.loop_id == loop_id { + if l.loop_id == label.loop_id { return *l; } } - span_bug!(expr.span, "no loop scope for id {}", loop_id); - } - - r => { - span_bug!(expr.span, "bad entry `{:?}` in def_map for label", r); + span_bug!(expr.span, "no loop scope for id {}", label.loop_id); } } } diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 351feaba034..e261c699b6a 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -42,6 +42,10 @@ pub enum DepNode { // Represents the HIR node with the given node-id Hir(D), + // Represents the body of a function or method. The def-id is that of the + // function/method. + HirBody(D), + // Represents the metadata for a given HIR node, typically found // in an extern crate. MetaData(D), @@ -59,6 +63,7 @@ pub enum DepNode { PluginRegistrar, StabilityIndex, CollectItem(D), + CollectItemSig(D), Coherence, EffectCheck, Liveness, @@ -90,7 +95,7 @@ pub enum DepNode { RvalueCheck(D), Reachability, DeadCheck, - StabilityCheck, + StabilityCheck(D), LateLintCheck, TransCrate, TransCrateItem(D), @@ -105,7 +110,6 @@ pub enum DepNode { // predicates for an item wind up in `ItemSignature`). 
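The `DepNode` changes in this hunk add `HirBody`, `CollectItemSig`, and a parameterized `StabilityCheck(D)` so that function bodies and item signatures can be tracked as separate dependency-graph nodes. Below is a self-contained toy (not rustc's actual types) of the `map_def` pattern those variants plug into: a closure translates the def-id parameter, and the node is dropped if translation fails.

```rust
#[derive(Debug, PartialEq)]
enum DepNode<D> {
    Krate,
    Hir(D),
    HirBody(D),
}

impl<D> DepNode<D> {
    // Rebuild the node with a translated id, or drop it entirely.
    fn map_def<E, F>(&self, mut op: F) -> Option<DepNode<E>>
    where
        F: FnMut(&D) -> Option<E>,
    {
        match *self {
            DepNode::Krate => Some(DepNode::Krate),
            DepNode::Hir(ref d) => op(d).map(DepNode::Hir),
            DepNode::HirBody(ref d) => op(d).map(DepNode::HirBody),
        }
    }
}

fn main() {
    let node = DepNode::HirBody(7u32);
    assert_eq!(node.map_def(|&d| Some(d.to_string())),
               Some(DepNode::HirBody("7".to_string())));
    assert_eq!(node.map_def(|_| None::<String>), None);
}
```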
AssociatedItems(D), ItemSignature(D), - FieldTy(D), SizedConstraint(D), AssociatedItemDefIds(D), InherentImpls(D), @@ -150,12 +154,12 @@ impl DepNode { CollectItem, BorrowCheck, Hir, + HirBody, TransCrateItem, TypeckItemType, TypeckItemBody, AssociatedItems, ItemSignature, - FieldTy, AssociatedItemDefIds, InherentImpls, TraitImpls, @@ -189,7 +193,6 @@ impl DepNode { Privacy => Some(Privacy), Reachability => Some(Reachability), DeadCheck => Some(DeadCheck), - StabilityCheck => Some(StabilityCheck), LateLintCheck => Some(LateLintCheck), TransCrate => Some(TransCrate), TransWriteMetadata => Some(TransWriteMetadata), @@ -200,8 +203,10 @@ impl DepNode { WorkProduct(ref id) => Some(WorkProduct(id.clone())), Hir(ref d) => op(d).map(Hir), + HirBody(ref d) => op(d).map(HirBody), MetaData(ref d) => op(d).map(MetaData), CollectItem(ref d) => op(d).map(CollectItem), + CollectItemSig(ref d) => op(d).map(CollectItemSig), CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl), CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck), CoherenceOverlapCheckSpecial(ref d) => op(d).map(CoherenceOverlapCheckSpecial), @@ -217,11 +222,11 @@ impl DepNode { Mir(ref d) => op(d).map(Mir), BorrowCheck(ref d) => op(d).map(BorrowCheck), RvalueCheck(ref d) => op(d).map(RvalueCheck), + StabilityCheck(ref d) => op(d).map(StabilityCheck), TransCrateItem(ref d) => op(d).map(TransCrateItem), TransInlinedItem(ref d) => op(d).map(TransInlinedItem), AssociatedItems(ref d) => op(d).map(AssociatedItems), ItemSignature(ref d) => op(d).map(ItemSignature), - FieldTy(ref d) => op(d).map(FieldTy), SizedConstraint(ref d) => op(d).map(SizedConstraint), AssociatedItemDefIds(ref d) => op(d).map(AssociatedItemDefIds), InherentImpls(ref d) => op(d).map(InherentImpls), diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index abc35634d15..6f5f548aa78 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -129,8 +129,8 @@ impl<'a> CheckAttrVisitor<'a> { } } -impl<'a> Visitor for CheckAttrVisitor<'a> { - fn visit_item(&mut self, item: &ast::Item) { +impl<'a> Visitor<'a> for CheckAttrVisitor<'a> { + fn visit_item(&mut self, item: &'a ast::Item) { let target = Target::from_item(item); for attr in &item.attrs { self.check_attribute(attr, target); diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index feefc43f401..b6fce2d6ca0 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -83,14 +83,6 @@ impl PathResolution { PathResolution { base_def: def, depth: 0 } } - /// Get the definition, if fully resolved, otherwise panic. - pub fn full_def(&self) -> Def { - if self.depth != 0 { - bug!("path not fully resolved: {:?}", self); - } - self.base_def - } - pub fn kind_name(&self) -> &'static str { if self.depth != 0 { "associated item" diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index 7dd88e36dd1..625bde2ca8b 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -38,6 +38,7 @@ use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute}; use syntax::codemap::Spanned; use syntax_pos::Span; use hir::*; +use hir::def::Def; use hir::map::Map; use super::itemlikevisit::DeepVisitor; @@ -66,6 +67,62 @@ impl<'a> FnKind<'a> { } } +/// Specifies what nested things a visitor wants to visit. The most +/// common choice is `OnlyBodies`, which will cause the visitor to +/// visit fn bodies for fns that it encounters, but skip over nested +/// item-like things. 
+/// +/// See the comments on `ItemLikeVisitor` for more details on the overall +/// visit strategy. +pub enum NestedVisitorMap<'this, 'tcx: 'this> { + /// Do not visit any nested things. When you add a new + /// "non-nested" thing, you will want to audit such uses to see if + /// they remain valid. + /// + /// Use this if you are only walking some particular kind of tree + /// (i.e., a type, or fn signature) and you don't want to thread a + /// HIR map around. + None, + + /// Do not visit nested item-like things, but visit nested things + /// that are inside of an item-like. + /// + /// **This is the most common choice.** A very commmon pattern is + /// to use `tcx.visit_all_item_likes_in_krate()` as an outer loop, + /// and to have the visitor that visits the contents of each item + /// using this setting. + OnlyBodies(&'this Map<'tcx>), + + /// Visit all nested things, including item-likes. + /// + /// **This is an unusual choice.** It is used when you want to + /// process everything within their lexical context. Typically you + /// kick off the visit by doing `walk_krate()`. + All(&'this Map<'tcx>), +} + +impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> { + /// Returns the map to use for an "intra item-like" thing (if any). + /// e.g., function body. + pub fn intra(self) -> Option<&'this Map<'tcx>> { + match self { + NestedVisitorMap::None => None, + NestedVisitorMap::OnlyBodies(map) => Some(map), + NestedVisitorMap::All(map) => Some(map), + } + } + + /// Returns the map to use for an "item-like" thing (if any). + /// e.g., item, impl-item. + pub fn inter(self) -> Option<&'this Map<'tcx>> { + match self { + NestedVisitorMap::None => None, + NestedVisitorMap::OnlyBodies(_) => None, + NestedVisitorMap::All(map) => Some(map), + } + } +} + /// Each method of the Visitor trait is a hook to be potentially /// overridden. Each method's default implementation recursively visits /// the substructure of the input via the corresponding `walk` method; @@ -87,13 +144,14 @@ pub trait Visitor<'v> : Sized { // Nested items. /// The default versions of the `visit_nested_XXX` routines invoke - /// this method to get a map to use; if they get back `None`, they - /// just skip nested things. Otherwise, they will lookup the - /// nested item-like things in the map and visit it. So the best - /// way to implement a nested visitor is to override this method - /// to return a `Map`; one advantage of this is that if we add - /// more types of nested things in the future, they will - /// automatically work. + /// this method to get a map to use. By selecting an enum variant, + /// you control which kinds of nested HIR are visited; see + /// `NestedVisitorMap` for details. By "nested HIR", we are + /// referring to bits of HIR that are not directly embedded within + /// one another but rather indirectly, through a table in the + /// crate. This is done to control dependencies during incremental + /// compilation: the non-inline bits of HIR can be tracked and + /// hashed separately. /// /// **If for some reason you want the nested behavior, but don't /// have a `Map` are your disposal:** then you should override the @@ -101,9 +159,7 @@ pub trait Visitor<'v> : Sized { /// `panic!()`. This way, if a new `visit_nested_XXX` variant is /// added in the future, we will see the panic in your code and /// fix it appropriately. - fn nested_visit_map(&mut self) -> Option<&Map<'v>> { - None - } + fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'v>; /// Invoked when a nested item is encountered. 
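A self-contained toy version (not the real rustc types) of the three strategies described above, showing how the `intra`/`inter` accessors gate which nested things a visitor will chase:

```rust
struct Map;

enum NestedVisitorMap<'a> {
    None,
    OnlyBodies(&'a Map),
    All(&'a Map),
}

impl<'a> NestedVisitorMap<'a> {
    // Map for things nested *inside* an item-like, e.g. fn bodies.
    fn intra(self) -> Option<&'a Map> {
        match self {
            NestedVisitorMap::None => None,
            NestedVisitorMap::OnlyBodies(m) | NestedVisitorMap::All(m) => Some(m),
        }
    }

    // Map for nested item-likes themselves.
    fn inter(self) -> Option<&'a Map> {
        match self {
            NestedVisitorMap::None | NestedVisitorMap::OnlyBodies(_) => None,
            NestedVisitorMap::All(m) => Some(m),
        }
    }
}

fn main() {
    let map = Map;
    assert!(NestedVisitorMap::None.intra().is_none());
    assert!(NestedVisitorMap::OnlyBodies(&map).intra().is_some());
    assert!(NestedVisitorMap::OnlyBodies(&map).inter().is_none());
    assert!(NestedVisitorMap::All(&map).inter().is_some());
}
```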
By default does /// nothing unless you override `nested_visit_map` to return @@ -115,8 +171,7 @@ pub trait Visitor<'v> : Sized { /// but cannot supply a `Map`; see `nested_visit_map` for advice. #[allow(unused_variables)] fn visit_nested_item(&mut self, id: ItemId) { - let opt_item = self.nested_visit_map() - .map(|map| map.expect_item(id.id)); + let opt_item = self.nested_visit_map().inter().map(|map| map.expect_item(id.id)); if let Some(item) = opt_item { self.visit_item(item); } @@ -127,13 +182,23 @@ pub trait Visitor<'v> : Sized { /// method. #[allow(unused_variables)] fn visit_nested_impl_item(&mut self, id: ImplItemId) { - let opt_item = self.nested_visit_map() - .map(|map| map.impl_item(id)); + let opt_item = self.nested_visit_map().inter().map(|map| map.impl_item(id)); if let Some(item) = opt_item { self.visit_impl_item(item); } } + /// Invoked to visit the body of a function, method or closure. Like + /// visit_nested_item, does nothing by default unless you override + /// `nested_visit_map` to return `Some(_)`, in which case it will walk the + /// body. + fn visit_body(&mut self, id: ExprId) { + let opt_expr = self.nested_visit_map().intra().map(|map| map.expr(id)); + if let Some(expr) = opt_expr { + self.visit_expr(expr); + } + } + /// Visit the top-level item and (optionally) nested items / impl items. See /// `visit_nested_item` for details. fn visit_item(&mut self, i: &'v Item) { @@ -155,6 +220,9 @@ pub trait Visitor<'v> : Sized { fn visit_id(&mut self, _node_id: NodeId) { // Nothing to do. } + fn visit_def_mention(&mut self, _def: Def) { + // Nothing to do. + } fn visit_name(&mut self, _span: Span, _name: Name) { // Nothing to do. } @@ -196,7 +264,7 @@ pub trait Visitor<'v> : Sized { fn visit_where_predicate(&mut self, predicate: &'v WherePredicate) { walk_where_predicate(self, predicate) } - fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: &'v Expr, s: Span, id: NodeId) { + fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl, b: ExprId, s: Span, id: NodeId) { walk_fn(self, fk, fd, b, s, id) } fn visit_trait_item(&mut self, ti: &'v TraitItem) { @@ -244,12 +312,12 @@ pub trait Visitor<'v> : Sized { fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) { walk_lifetime_def(self, lifetime) } + fn visit_qpath(&mut self, qpath: &'v QPath, id: NodeId, span: Span) { + walk_qpath(self, qpath, id, span) + } fn visit_path(&mut self, path: &'v Path, _id: NodeId) { walk_path(self, path) } - fn visit_path_list_item(&mut self, prefix: &'v Path, item: &'v PathListItem) { - walk_path_list_item(self, prefix, item) - } fn visit_path_segment(&mut self, path_span: Span, path_segment: &'v PathSegment) { walk_path_segment(self, path_span, path_segment) } @@ -349,23 +417,9 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { visitor.visit_id(item.id); walk_opt_name(visitor, item.span, opt_name) } - ItemUse(ref vp) => { + ItemUse(ref path, _) => { visitor.visit_id(item.id); - match vp.node { - ViewPathSimple(name, ref path) => { - visitor.visit_name(vp.span, name); - visitor.visit_path(path, item.id); - } - ViewPathGlob(ref path) => { - visitor.visit_path(path, item.id); - } - ViewPathList(ref prefix, ref list) => { - visitor.visit_path(prefix, item.id); - for item in list { - visitor.visit_path_list_item(prefix, item) - } - } - } + visitor.visit_path(path, item.id); } ItemStatic(ref typ, _, ref expr) | ItemConst(ref typ, ref expr) => { @@ -373,7 +427,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { visitor.visit_ty(typ); 
visitor.visit_expr(expr); } - ItemFn(ref declaration, unsafety, constness, abi, ref generics, ref body) => { + ItemFn(ref declaration, unsafety, constness, abi, ref generics, body_id) => { visitor.visit_fn(FnKind::ItemFn(item.name, generics, unsafety, @@ -382,7 +436,7 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { &item.vis, &item.attrs), declaration, - body, + body_id, item.span, item.id) } @@ -481,11 +535,8 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { walk_fn_decl(visitor, &function_declaration.decl); walk_list!(visitor, visit_lifetime_def, &function_declaration.lifetimes); } - TyPath(ref maybe_qself, ref path) => { - if let Some(ref qself) = *maybe_qself { - visitor.visit_ty(&qself.ty); - } - visitor.visit_path(path, typ.id); + TyPath(ref qpath) => { + visitor.visit_qpath(qpath, typ.id, typ.span); } TyObjectSum(ref ty, ref bounds) => { visitor.visit_ty(ty); @@ -508,18 +559,26 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { } } -pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) { - for segment in &path.segments { - visitor.visit_path_segment(path.span, segment); +pub fn walk_qpath<'v, V: Visitor<'v>>(visitor: &mut V, qpath: &'v QPath, id: NodeId, span: Span) { + match *qpath { + QPath::Resolved(ref maybe_qself, ref path) => { + if let Some(ref qself) = *maybe_qself { + visitor.visit_ty(qself); + } + visitor.visit_path(path, id) + } + QPath::TypeRelative(ref qself, ref segment) => { + visitor.visit_ty(qself); + visitor.visit_path_segment(span, segment); + } } } -pub fn walk_path_list_item<'v, V>(visitor: &mut V, _prefix: &'v Path, item: &'v PathListItem) - where V: Visitor<'v>, -{ - visitor.visit_id(item.node.id); - visitor.visit_name(item.span, item.node.name); - walk_opt_name(visitor, item.span, item.node.rename); +pub fn walk_path<'v, V: Visitor<'v>>(visitor: &mut V, path: &'v Path) { + visitor.visit_def_mention(path.def); + for segment in &path.segments { + visitor.visit_path_segment(path.span, segment); + } } pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, @@ -555,18 +614,15 @@ pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V, pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { visitor.visit_id(pattern.id); match pattern.node { - PatKind::TupleStruct(ref path, ref children, _) => { - visitor.visit_path(path, pattern.id); + PatKind::TupleStruct(ref qpath, ref children, _) => { + visitor.visit_qpath(qpath, pattern.id, pattern.span); walk_list!(visitor, visit_pat, children); } - PatKind::Path(ref opt_qself, ref path) => { - if let Some(ref qself) = *opt_qself { - visitor.visit_ty(&qself.ty); - } - visitor.visit_path(path, pattern.id) + PatKind::Path(ref qpath) => { + visitor.visit_qpath(qpath, pattern.id, pattern.span); } - PatKind::Struct(ref path, ref fields, _) => { - visitor.visit_path(path, pattern.id); + PatKind::Struct(ref qpath, ref fields, _) => { + visitor.visit_qpath(qpath, pattern.id, pattern.span); for field in fields { visitor.visit_name(field.span, field.node.name); visitor.visit_pat(&field.node.pat) @@ -579,7 +635,8 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { PatKind::Ref(ref subpattern, _) => { visitor.visit_pat(subpattern) } - PatKind::Binding(_, ref pth1, ref optional_subpattern) => { + PatKind::Binding(_, def_id, ref pth1, ref optional_subpattern) => { + visitor.visit_def_mention(Def::Local(def_id)); visitor.visit_name(pth1.span, pth1.node); walk_list!(visitor, visit_pat, 
optional_subpattern); } @@ -704,13 +761,25 @@ pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<' pub fn walk_fn<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<'v>, function_declaration: &'v FnDecl, - function_body: &'v Expr, + body_id: ExprId, _span: Span, id: NodeId) { visitor.visit_id(id); walk_fn_decl(visitor, function_declaration); walk_fn_kind(visitor, function_kind); - visitor.visit_expr(function_body) + visitor.visit_body(body_id) +} + +pub fn walk_fn_with_body<'v, V: Visitor<'v>>(visitor: &mut V, + function_kind: FnKind<'v>, + function_declaration: &'v FnDecl, + body: &'v Expr, + _span: Span, + id: NodeId) { + visitor.visit_id(id); + walk_fn_decl(visitor, function_declaration); + walk_fn_kind(visitor, function_kind); + visitor.visit_expr(body) } pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) { @@ -727,13 +796,13 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai visitor.visit_generics(&sig.generics); walk_fn_decl(visitor, &sig.decl); } - MethodTraitItem(ref sig, Some(ref body)) => { + MethodTraitItem(ref sig, Some(body_id)) => { visitor.visit_fn(FnKind::Method(trait_item.name, sig, None, &trait_item.attrs), &sig.decl, - body, + body_id, trait_item.span, trait_item.id); } @@ -759,13 +828,13 @@ pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplIt visitor.visit_ty(ty); visitor.visit_expr(expr); } - ImplItemKind::Method(ref sig, ref body) => { + ImplItemKind::Method(ref sig, body_id) => { visitor.visit_fn(FnKind::Method(impl_item.name, sig, Some(&impl_item.vis), &impl_item.attrs), &sig.decl, - body, + body_id, impl_item.span, impl_item.id); } @@ -840,8 +909,8 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(element); visitor.visit_expr(count) } - ExprStruct(ref path, ref fields, ref optional_base) => { - visitor.visit_path(path, expression.id); + ExprStruct(ref qpath, ref fields, ref optional_base) => { + visitor.visit_qpath(qpath, expression.id, expression.span); for field in fields { visitor.visit_name(field.name.span, field.name.node); visitor.visit_expr(&field.expr) @@ -890,7 +959,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(subexpression); walk_list!(visitor, visit_arm, arms); } - ExprClosure(_, ref function_declaration, ref body, _fn_decl_span) => { + ExprClosure(_, ref function_declaration, body, _fn_decl_span) => { visitor.visit_fn(FnKind::Closure(&expression.attrs), function_declaration, body, @@ -917,18 +986,21 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(main_expression); visitor.visit_expr(index_expression) } - ExprPath(ref maybe_qself, ref path) => { - if let Some(ref qself) = *maybe_qself { - visitor.visit_ty(&qself.ty); - } - visitor.visit_path(path, expression.id) + ExprPath(ref qpath) => { + visitor.visit_qpath(qpath, expression.id, expression.span); } - ExprBreak(ref opt_sp_name, ref opt_expr) => { - walk_opt_sp_name(visitor, opt_sp_name); + ExprBreak(None, ref opt_expr) => { walk_list!(visitor, visit_expr, opt_expr); } - ExprAgain(ref opt_sp_name) => { - walk_opt_sp_name(visitor, opt_sp_name); + ExprBreak(Some(label), ref opt_expr) => { + visitor.visit_def_mention(Def::Label(label.loop_id)); + visitor.visit_name(label.span, label.name); + walk_list!(visitor, visit_expr, opt_expr); + } + ExprAgain(None) => {} + ExprAgain(Some(label)) => { + 
visitor.visit_def_mention(Def::Label(label.loop_id)); + visitor.visit_name(label.span, label.name); } ExprRet(ref optional_expression) => { walk_list!(visitor, visit_expr, optional_expression); @@ -1002,13 +1074,14 @@ impl IdRange { } -pub struct IdRangeComputingVisitor { - pub result: IdRange, +pub struct IdRangeComputingVisitor<'a, 'ast: 'a> { + result: IdRange, + map: &'a map::Map<'ast>, } -impl IdRangeComputingVisitor { - pub fn new() -> IdRangeComputingVisitor { - IdRangeComputingVisitor { result: IdRange::max() } +impl<'a, 'ast> IdRangeComputingVisitor<'a, 'ast> { + pub fn new(map: &'a map::Map<'ast>) -> IdRangeComputingVisitor<'a, 'ast> { + IdRangeComputingVisitor { result: IdRange::max(), map: map } } pub fn result(&self) -> IdRange { @@ -1016,20 +1089,25 @@ impl IdRangeComputingVisitor { } } -impl<'v> Visitor<'v> for IdRangeComputingVisitor { +impl<'a, 'ast> Visitor<'ast> for IdRangeComputingVisitor<'a, 'ast> { + fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> { + NestedVisitorMap::OnlyBodies(&self.map) + } + fn visit_id(&mut self, id: NodeId) { self.result.add(id); } } /// Computes the id range for a single fn body, ignoring nested items. -pub fn compute_id_range_for_fn_body(fk: FnKind, - decl: &FnDecl, - body: &Expr, - sp: Span, - id: NodeId) - -> IdRange { - let mut visitor = IdRangeComputingVisitor::new(); - visitor.visit_fn(fk, decl, body, sp, id); +pub fn compute_id_range_for_fn_body<'v>(fk: FnKind<'v>, + decl: &'v FnDecl, + body: &'v Expr, + sp: Span, + id: NodeId, + map: &map::Map<'v>) + -> IdRange { + let mut visitor = IdRangeComputingVisitor::new(map); + walk_fn_with_body(&mut visitor, fk, decl, body, sp, id); visitor.result() } diff --git a/src/librustc/hir/itemlikevisit.rs b/src/librustc/hir/itemlikevisit.rs index 1e373441e9e..71ef7131440 100644 --- a/src/librustc/hir/itemlikevisit.rs +++ b/src/librustc/hir/itemlikevisit.rs @@ -41,8 +41,10 @@ use super::intravisit::Visitor; /// item-like things. /// - Example: Lifetime resolution, which wants to bring lifetimes declared on the /// impl into scope while visiting the impl-items, and then back out again. -/// - How: Implement `intravisit::Visitor` and override the `visit_nested_foo()` foo methods -/// as needed. Walk your crate with `intravisit::walk_crate()` invoked on `tcx.map.krate()`. +/// - How: Implement `intravisit::Visitor` and override the +/// `visit_nested_map()` methods to return +/// `NestedVisitorMap::All`. Walk your crate with +/// `intravisit::walk_crate()` invoked on `tcx.map.krate()`. /// - Pro: Visitor methods for any kind of HIR node, not just item-like things. /// - Pro: Preserves nesting information /// - Con: Does not integrate well into dependency tracking. 
diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 5af7c18e1a1..74876eb59ee 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -46,15 +46,20 @@ use hir::map::definitions::DefPathData; use hir::def_id::{DefIndex, DefId}; use hir::def::{Def, PathResolution}; use session::Session; +use util::nodemap::NodeMap; +use rustc_data_structures::fnv::FnvHashMap; use std::collections::BTreeMap; use std::iter; +use std::mem; + use syntax::ast::*; use syntax::errors; use syntax::ptr::P; -use syntax::codemap::{respan, Spanned}; +use syntax::codemap::{self, respan, Spanned}; use syntax::std_inject; use syntax::symbol::{Symbol, keywords}; +use syntax::util::small_vector::SmallVector; use syntax::visit::{self, Visitor}; use syntax_pos::Span; @@ -66,19 +71,22 @@ pub struct LoweringContext<'a> { // the form of a DefIndex) so that if we create a new node which introduces // a definition, then we can properly create the def id. parent_def: Option, + exprs: FnvHashMap, resolver: &'a mut Resolver, + + /// The items being lowered are collected here. + items: BTreeMap, + + impl_items: BTreeMap, } pub trait Resolver { // Resolve a global hir path generated by the lowerer when expanding `for`, `if let`, etc. - fn resolve_generated_global_path(&mut self, path: &hir::Path, is_value: bool) -> Def; + fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool); // Obtain the resolution for a node id fn get_resolution(&mut self, id: NodeId) -> Option; - // Record the resolution of a path or binding generated by the lowerer when expanding. - fn record_resolution(&mut self, id: NodeId, def: Def); - // We must keep the set of definitions up to date as we add nodes that weren't in the AST. // This should only return `None` during testing. fn definitions(&mut self) -> &mut Definitions; @@ -97,53 +105,82 @@ pub fn lower_crate(sess: &Session, crate_root: std_inject::injected_crate_name(krate), sess: sess, parent_def: None, + exprs: FnvHashMap(), resolver: resolver, + items: BTreeMap::new(), + impl_items: BTreeMap::new(), }.lower_crate(krate) } +#[derive(Copy, Clone, PartialEq, Eq)] +enum ParamMode { + /// Any path in a type context. + Explicit, + /// The `module::Type` in `module::Type::method` in an expression. 
+ Optional +} + impl<'a> LoweringContext<'a> { - fn lower_crate(&mut self, c: &Crate) -> hir::Crate { + fn lower_crate(mut self, c: &Crate) -> hir::Crate { + self.lower_items(c); + let module = self.lower_mod(&c.module); + let attrs = self.lower_attrs(&c.attrs); + let exported_macros = c.exported_macros.iter().map(|m| self.lower_macro_def(m)).collect(); + + hir::Crate { + module: module, + attrs: attrs, + span: c.span, + exported_macros: exported_macros, + items: self.items, + impl_items: self.impl_items, + exprs: mem::replace(&mut self.exprs, FnvHashMap()), + } + } + + fn lower_items(&mut self, c: &Crate) { struct ItemLowerer<'lcx, 'interner: 'lcx> { - items: BTreeMap, - impl_items: BTreeMap, lctx: &'lcx mut LoweringContext<'interner>, } - impl<'lcx, 'interner> Visitor for ItemLowerer<'lcx, 'interner> { - fn visit_item(&mut self, item: &Item) { - self.items.insert(item.id, self.lctx.lower_item(item)); + impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> { + fn visit_item(&mut self, item: &'lcx Item) { + let hir_item = self.lctx.lower_item(item); + self.lctx.items.insert(item.id, hir_item); visit::walk_item(self, item); } - fn visit_impl_item(&mut self, item: &ImplItem) { + fn visit_impl_item(&mut self, item: &'lcx ImplItem) { let id = self.lctx.lower_impl_item_ref(item).id; - self.impl_items.insert(id, self.lctx.lower_impl_item(item)); + let hir_item = self.lctx.lower_impl_item(item); + self.lctx.impl_items.insert(id, hir_item); visit::walk_impl_item(self, item); } } - let (items, impl_items) = { - let mut item_lowerer = ItemLowerer { items: BTreeMap::new(), - impl_items: BTreeMap::new(), - lctx: self }; - visit::walk_crate(&mut item_lowerer, c); - (item_lowerer.items, item_lowerer.impl_items) - }; + let mut item_lowerer = ItemLowerer { lctx: self }; + visit::walk_crate(&mut item_lowerer, c); + } - hir::Crate { - module: self.lower_mod(&c.module), - attrs: self.lower_attrs(&c.attrs), - span: c.span, - exported_macros: c.exported_macros.iter().map(|m| self.lower_macro_def(m)).collect(), - items: items, - impl_items: impl_items, - } + fn record_expr(&mut self, expr: hir::Expr) -> hir::ExprId { + let id = hir::ExprId(expr.id); + self.exprs.insert(id, expr); + id } fn next_id(&self) -> NodeId { self.sess.next_node_id() } + fn expect_full_def(&mut self, id: NodeId) -> Def { + self.resolver.get_resolution(id).map_or(Def::Err, |pr| { + if pr.depth != 0 { + bug!("path not fully resolved: {:?}", pr); + } + pr.base_def + }) + } + fn diagnostic(&self) -> &errors::Handler { self.sess.diagnostic() } @@ -152,6 +189,18 @@ impl<'a> LoweringContext<'a> { Symbol::gensym(s) } + fn allow_internal_unstable(&self, reason: &'static str, mut span: Span) -> Span { + span.expn_id = self.sess.codemap().record_expansion(codemap::ExpnInfo { + call_site: span, + callee: codemap::NameAndSpan { + format: codemap::CompilerDesugaring(Symbol::intern(reason)), + span: Some(span), + allow_internal_unstable: true, + }, + }); + span + } + fn with_parent_def(&mut self, parent_id: NodeId, f: F) -> T where F: FnOnce(&mut LoweringContext) -> T { @@ -171,39 +220,21 @@ impl<'a> LoweringContext<'a> { o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name)) } - fn lower_attrs(&mut self, attrs: &Vec) -> hir::HirVec { - attrs.clone().into() - } - - fn lower_view_path(&mut self, view_path: &ViewPath) -> P { - P(Spanned { - node: match view_path.node { - ViewPathSimple(ident, ref path) => { - hir::ViewPathSimple(ident.name, self.lower_path(path)) + fn lower_label(&mut self, id: NodeId, label: Option>) -> Option { + 
label.map(|sp_ident| { + hir::Label { + span: sp_ident.span, + name: sp_ident.node.name, + loop_id: match self.expect_full_def(id) { + Def::Label(loop_id) => loop_id, + _ => DUMMY_NODE_ID } - ViewPathGlob(ref path) => { - hir::ViewPathGlob(self.lower_path(path)) - } - ViewPathList(ref path, ref path_list_idents) => { - hir::ViewPathList(self.lower_path(path), - path_list_idents.iter() - .map(|item| self.lower_path_list_item(item)) - .collect()) - } - }, - span: view_path.span, + } }) } - fn lower_path_list_item(&mut self, path_list_ident: &PathListItem) -> hir::PathListItem { - Spanned { - node: hir::PathListItem_ { - id: path_list_ident.node.id, - name: path_list_ident.node.name.name, - rename: path_list_ident.node.rename.map(|rename| rename.name), - }, - span: path_list_ident.span, - } + fn lower_attrs(&mut self, attrs: &Vec) -> hir::HirVec { + attrs.clone().into() } fn lower_arm(&mut self, arm: &Arm) -> hir::Arm { @@ -250,13 +281,7 @@ impl<'a> LoweringContext<'a> { return self.lower_ty(ty); } TyKind::Path(ref qself, ref path) => { - let qself = qself.as_ref().map(|&QSelf { ref ty, position }| { - hir::QSelf { - ty: self.lower_ty(ty), - position: position, - } - }); - hir::TyPath(qself, self.lower_path(path)) + hir::TyPath(self.lower_qpath(t.id, qself, path, ParamMode::Explicit)) } TyKind::ObjectSum(ref ty, ref bounds) => { hir::TyObjectSum(self.lower_ty(ty), self.lower_bounds(bounds)) @@ -298,38 +323,140 @@ impl<'a> LoweringContext<'a> { } } - fn lower_path(&mut self, p: &Path) -> hir::Path { + fn lower_qpath(&mut self, + id: NodeId, + qself: &Option, + p: &Path, + param_mode: ParamMode) + -> hir::QPath { + let qself_position = qself.as_ref().map(|q| q.position); + let qself = qself.as_ref().map(|q| self.lower_ty(&q.ty)); + + let resolution = self.resolver.get_resolution(id) + .unwrap_or(PathResolution::new(Def::Err)); + + let proj_start = p.segments.len() - resolution.depth; + let path = P(hir::Path { + global: p.global, + def: resolution.base_def, + segments: p.segments[..proj_start].iter().enumerate().map(|(i, segment)| { + let param_mode = match (qself_position, param_mode) { + (Some(j), ParamMode::Optional) if i < j => { + // This segment is part of the trait path in a + // qualified path - one of `a`, `b` or `Trait` + // in `::T::U::method`. + ParamMode::Explicit + } + _ => param_mode + }; + self.lower_path_segment(segment, param_mode) + }).collect(), + span: p.span, + }); + + // Simple case, either no projections, or only fully-qualified. + // E.g. `std::mem::size_of` or `::Item`. + if resolution.depth == 0 { + return hir::QPath::Resolved(qself, path); + } + + // Create the innermost type that we're projecting from. + let mut ty = if path.segments.is_empty() { + // If the base path is empty that means there exists a + // syntactical `Self`, e.g. `&i32` in `<&i32>::clone`. + qself.expect("missing QSelf for ::...") + } else { + // Otherwise, the base path is an implicit `Self` type path, + // e.g. `Vec` in `Vec::new` or `::Item` in + // `::Item::default`. + self.ty(p.span, hir::TyPath(hir::QPath::Resolved(qself, path))) + }; + + // Anything after the base path are associated "extensions", + // out of which all but the last one are associated types, + // e.g. for `std::vec::Vec::::IntoIter::Item::clone`: + // * base path is `std::vec::Vec` + // * "extensions" are `IntoIter`, `Item` and `clone` + // * type nodes are: + // 1. `std::vec::Vec` (created above) + // 2. `>::IntoIter` + // 3. 
`<>::IntoIter>::Item` + // * final path is `<<>::IntoIter>::Item>::clone` + for (i, segment) in p.segments.iter().enumerate().skip(proj_start) { + let segment = P(self.lower_path_segment(segment, param_mode)); + let qpath = hir::QPath::TypeRelative(ty, segment); + + // It's finished, return the extension of the right node type. + if i == p.segments.len() - 1 { + return qpath; + } + + // Wrap the associated extension in another type node. + ty = self.ty(p.span, hir::TyPath(qpath)); + } + + // Should've returned in the for loop above. + span_bug!(p.span, "lower_qpath: no final extension segment in {}..{}", + proj_start, p.segments.len()) + } + + fn lower_path_extra(&mut self, + id: NodeId, + p: &Path, + name: Option, + param_mode: ParamMode) + -> hir::Path { hir::Path { global: p.global, - segments: p.segments - .iter() - .map(|&PathSegment { identifier, ref parameters }| { - hir::PathSegment { - name: identifier.name, - parameters: self.lower_path_parameters(parameters), - } - }) - .collect(), + def: self.expect_full_def(id), + segments: p.segments.iter().map(|segment| { + self.lower_path_segment(segment, param_mode) + }).chain(name.map(|name| { + hir::PathSegment { + name: name, + parameters: hir::PathParameters::none() + } + })).collect(), span: p.span, } } - fn lower_path_parameters(&mut self, path_parameters: &PathParameters) -> hir::PathParameters { - match *path_parameters { - PathParameters::AngleBracketed(ref data) => - hir::AngleBracketedParameters(self.lower_angle_bracketed_parameter_data(data)), + fn lower_path(&mut self, + id: NodeId, + p: &Path, + param_mode: ParamMode) + -> hir::Path { + self.lower_path_extra(id, p, None, param_mode) + } + + fn lower_path_segment(&mut self, + segment: &PathSegment, + param_mode: ParamMode) + -> hir::PathSegment { + let parameters = match segment.parameters { + PathParameters::AngleBracketed(ref data) => { + let data = self.lower_angle_bracketed_parameter_data(data, param_mode); + hir::AngleBracketedParameters(data) + } PathParameters::Parenthesized(ref data) => hir::ParenthesizedParameters(self.lower_parenthesized_parameter_data(data)), + }; + + hir::PathSegment { + name: segment.identifier.name, + parameters: parameters, } } fn lower_angle_bracketed_parameter_data(&mut self, - data: &AngleBracketedParameterData) + data: &AngleBracketedParameterData, + param_mode: ParamMode) -> hir::AngleBracketedParameterData { let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings } = data; hir::AngleBracketedParameterData { lifetimes: self.lower_lifetimes(lifetimes), types: types.iter().map(|ty| self.lower_ty(ty)).collect(), + infer_types: types.is_empty() && param_mode == ParamMode::Optional, bindings: bindings.iter().map(|b| self.lower_ty_binding(b)).collect(), } } @@ -394,7 +521,7 @@ impl<'a> LoweringContext<'a> { } } - fn lower_ty_param(&mut self, tp: &TyParam) -> hir::TyParam { + fn lower_ty_param(&mut self, tp: &TyParam, add_bounds: &[TyParamBound]) -> hir::TyParam { let mut name = tp.ident.name; // Don't expose `Self` (recovered "keyword used as ident" parse error). 
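The comment block above describes how `lower_qpath` splits a partially resolved path: the first `segments.len() - resolution.depth` segments become the resolved base path, and each remaining segment is wrapped in a further `QPath::TypeRelative` projection. A toy illustration of just the splitting step (a hypothetical helper, not the real lowering code):

```rust
// Split path segments into (resolved base, trailing projections),
// given how many trailing segments the resolver left unresolved.
fn split_path<'a>(segments: &'a [&'a str], depth: usize) -> (&'a [&'a str], &'a [&'a str]) {
    let proj_start = segments.len() - depth;
    (&segments[..proj_start], &segments[proj_start..])
}

fn main() {
    // e.g. std::vec::Vec::IntoIter::Item::clone with 3 unresolved segments
    let segments = ["std", "vec", "Vec", "IntoIter", "Item", "clone"];
    let (base, extensions) = split_path(&segments, 3);
    assert_eq!(base, ["std", "vec", "Vec"]);
    assert_eq!(extensions, ["IntoIter", "Item", "clone"]);
}
```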
@@ -404,18 +531,26 @@ impl<'a> LoweringContext<'a> { name = Symbol::gensym("Self"); } + let mut bounds = self.lower_bounds(&tp.bounds); + if !add_bounds.is_empty() { + bounds = bounds.into_iter().chain(self.lower_bounds(add_bounds).into_iter()).collect(); + } + hir::TyParam { id: tp.id, name: name, - bounds: self.lower_bounds(&tp.bounds), + bounds: bounds, default: tp.default.as_ref().map(|x| self.lower_ty(x)), span: tp.span, pure_wrt_drop: tp.attrs.iter().any(|attr| attr.check_name("may_dangle")), } } - fn lower_ty_params(&mut self, tps: &P<[TyParam]>) -> hir::HirVec { - tps.iter().map(|tp| self.lower_ty_param(tp)).collect() + fn lower_ty_params(&mut self, tps: &P<[TyParam]>, add_bounds: &NodeMap>) + -> hir::HirVec { + tps.iter().map(|tp| { + self.lower_ty_param(tp, add_bounds.get(&tp.id).map_or(&[][..], |x| &x)) + }).collect() } fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime { @@ -447,8 +582,47 @@ impl<'a> LoweringContext<'a> { } fn lower_generics(&mut self, g: &Generics) -> hir::Generics { + // Collect `?Trait` bounds in where clause and move them to parameter definitions. + let mut add_bounds = NodeMap(); + for pred in &g.where_clause.predicates { + if let WherePredicate::BoundPredicate(ref bound_pred) = *pred { + 'next_bound: for bound in &bound_pred.bounds { + if let TraitTyParamBound(_, TraitBoundModifier::Maybe) = *bound { + let report_error = |this: &mut Self| { + this.diagnostic().span_err(bound_pred.bounded_ty.span, + "`?Trait` bounds are only permitted at the \ + point where a type parameter is declared"); + }; + // Check if the where clause type is a plain type parameter. + match bound_pred.bounded_ty.node { + TyKind::Path(None, ref path) + if !path.global && path.segments.len() == 1 && + bound_pred.bound_lifetimes.is_empty() => { + if let Some(Def::TyParam(def_id)) = + self.resolver.get_resolution(bound_pred.bounded_ty.id) + .map(|d| d.base_def) { + if let Some(node_id) = + self.resolver.definitions().as_local_node_id(def_id) { + for ty_param in &g.ty_params { + if node_id == ty_param.id { + add_bounds.entry(ty_param.id).or_insert(Vec::new()) + .push(bound.clone()); + continue 'next_bound; + } + } + } + } + report_error(self) + } + _ => report_error(self) + } + } + } + } + } + hir::Generics { - ty_params: self.lower_ty_params(&g.ty_params), + ty_params: self.lower_ty_params(&g.ty_params, &add_bounds), lifetimes: self.lower_lifetime_defs(&g.lifetimes), where_clause: self.lower_where_clause(&g.where_clause), span: g.span, @@ -474,7 +648,11 @@ impl<'a> LoweringContext<'a> { hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate { bound_lifetimes: self.lower_lifetime_defs(bound_lifetimes), bounded_ty: self.lower_ty(bounded_ty), - bounds: bounds.iter().map(|x| self.lower_ty_param_bound(x)).collect(), + bounds: bounds.iter().filter_map(|bound| match *bound { + // Ignore `?Trait` bounds, they were copied into type parameters already. 
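The `lower_generics` change above is what allows `?Sized` to appear in a `where` clause as long as the bounded type is a plain type parameter; the bound is then moved onto that parameter's declaration, and anything else triggers the quoted diagnostic. A small illustration of what is and is not accepted:

```rust
// Declared directly on the parameter: always fine.
fn by_decl<T: ?Sized>(_x: &T) {}

// In a `where` clause naming a plain type parameter: accepted, and lowered
// onto `T` exactly as described above.
fn by_where<T>(_x: &T)
where
    T: ?Sized,
{
}

// Applying it to anything else, e.g. `where Box<T>: ?Sized`, is rejected with
// "`?Trait` bounds are only permitted at the point where a type parameter is
// declared".

fn main() {
    by_decl("str slices are unsized");
    by_where("so this works too");
}
```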
+ TraitTyParamBound(_, TraitBoundModifier::Maybe) => None, + _ => Some(self.lower_ty_param_bound(bound)) + }).collect(), span: span, }) } @@ -493,7 +671,7 @@ impl<'a> LoweringContext<'a> { span}) => { hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { id: id, - path: self.lower_path(path), + path: self.lower_path(id, path, ParamMode::Explicit), ty: self.lower_ty(ty), span: span, }) @@ -523,7 +701,7 @@ impl<'a> LoweringContext<'a> { fn lower_trait_ref(&mut self, p: &TraitRef) -> hir::TraitRef { hir::TraitRef { - path: self.lower_path(&p.path), + path: self.lower_path(p.ref_id, &p.path, ParamMode::Explicit), ref_id: p.ref_id, } } @@ -563,17 +741,15 @@ impl<'a> LoweringContext<'a> { } } - fn lower_bounds(&mut self, bounds: &TyParamBounds) -> hir::TyParamBounds { + fn lower_bounds(&mut self, bounds: &[TyParamBound]) -> hir::TyParamBounds { bounds.iter().map(|bound| self.lower_ty_param_bound(bound)).collect() } fn lower_block(&mut self, b: &Block) -> P { - let mut stmts = Vec::new(); let mut expr = None; - if let Some((last, rest)) = b.stmts.split_last() { - stmts = rest.iter().map(|s| self.lower_stmt(s)).collect::>(); - let last = self.lower_stmt(last); + let mut stmts = b.stmts.iter().flat_map(|s| self.lower_stmt(s)).collect::>(); + if let Some(last) = stmts.pop() { if let hir::StmtExpr(e, _) = last.node { expr = Some(e); } else { @@ -590,11 +766,66 @@ impl<'a> LoweringContext<'a> { }) } - fn lower_item_kind(&mut self, i: &ItemKind) -> hir::Item_ { + fn lower_item_kind(&mut self, + id: NodeId, + name: &mut Name, + attrs: &hir::HirVec, + vis: &mut hir::Visibility, + i: &ItemKind) + -> hir::Item_ { match *i { ItemKind::ExternCrate(string) => hir::ItemExternCrate(string), ItemKind::Use(ref view_path) => { - hir::ItemUse(self.lower_view_path(view_path)) + let path = match view_path.node { + ViewPathSimple(_, ref path) => path, + ViewPathGlob(ref path) => path, + ViewPathList(ref path, ref path_list_idents) => { + for &Spanned { node: ref import, span } in path_list_idents { + // `use a::{self as x, b as y};` lowers to + // `use a as x; use a::b as y;` + let mut ident = import.name; + let suffix = if ident.name == keywords::SelfValue.name() { + if let Some(last) = path.segments.last() { + ident = last.identifier; + } + None + } else { + Some(ident.name) + }; + + let mut path = self.lower_path_extra(import.id, path, suffix, + ParamMode::Explicit); + path.span = span; + self.items.insert(import.id, hir::Item { + id: import.id, + name: import.rename.unwrap_or(ident).name, + attrs: attrs.clone(), + node: hir::ItemUse(P(path), hir::UseKind::Single), + vis: vis.clone(), + span: span, + }); + } + path + } + }; + let path = P(self.lower_path(id, path, ParamMode::Explicit)); + let kind = match view_path.node { + ViewPathSimple(ident, _) => { + *name = ident.name; + hir::UseKind::Single + } + ViewPathGlob(_) => { + hir::UseKind::Glob + } + ViewPathList(..) => { + // Privatize the degenerate import base, used only to check + // the stability of `use a::{};`, to avoid it showing up as + // a reexport by accident when `pub`, e.g. in documentation. 
+ *vis = hir::Inherited; + hir::UseKind::ListStem + } + }; + hir::ItemUse(path, kind) } ItemKind::Static(ref t, m, ref e) => { hir::ItemStatic(self.lower_ty(t), @@ -606,12 +837,14 @@ impl<'a> LoweringContext<'a> { } ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => { let body = self.lower_block(body); + let body = self.expr_block(body, ThinVec::new()); + let body_id = self.record_expr(body); hir::ItemFn(self.lower_fn_decl(decl), self.lower_unsafety(unsafety), self.lower_constness(constness), abi, self.lower_generics(generics), - P(self.expr_block(body, ThinVec::new()))) + body_id) } ItemKind::Mod(ref m) => hir::ItemMod(self.lower_mod(m)), ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(self.lower_foreign_mod(nm)), @@ -678,7 +911,8 @@ impl<'a> LoweringContext<'a> { hir::MethodTraitItem(this.lower_method_sig(sig), body.as_ref().map(|x| { let body = this.lower_block(x); - P(this.expr_block(body, ThinVec::new())) + let expr = this.expr_block(body, ThinVec::new()); + this.record_expr(expr) })) } TraitItemKind::Type(ref bounds, ref default) => { @@ -706,8 +940,9 @@ impl<'a> LoweringContext<'a> { } ImplItemKind::Method(ref sig, ref body) => { let body = this.lower_block(body); - hir::ImplItemKind::Method(this.lower_method_sig(sig), - P(this.expr_block(body, ThinVec::new()))) + let expr = this.expr_block(body, ThinVec::new()); + let expr_id = this.record_expr(expr); + hir::ImplItemKind::Method(this.lower_method_sig(sig), expr_id) } ImplItemKind::Type(ref ty) => hir::ImplItemKind::Type(this.lower_ty(ty)), ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"), @@ -742,7 +977,7 @@ impl<'a> LoweringContext<'a> { fn lower_mod(&mut self, m: &Mod) -> hir::Mod { hir::Mod { inner: m.inner, - item_ids: m.items.iter().map(|x| self.lower_item_id(x)).collect(), + item_ids: m.items.iter().flat_map(|x| self.lower_item_id(x)).collect(), } } @@ -758,21 +993,30 @@ impl<'a> LoweringContext<'a> { } } - fn lower_item_id(&mut self, i: &Item) -> hir::ItemId { - hir::ItemId { id: i.id } + fn lower_item_id(&mut self, i: &Item) -> SmallVector { + if let ItemKind::Use(ref view_path) = i.node { + if let ViewPathList(_, ref imports) = view_path.node { + return iter::once(i.id).chain(imports.iter().map(|import| import.node.id)) + .map(|id| hir::ItemId { id: id }).collect(); + } + } + SmallVector::one(hir::ItemId { id: i.id }) } pub fn lower_item(&mut self, i: &Item) -> hir::Item { + let mut name = i.ident.name; + let attrs = self.lower_attrs(&i.attrs); + let mut vis = self.lower_visibility(&i.vis); let node = self.with_parent_def(i.id, |this| { - this.lower_item_kind(&i.node) + this.lower_item_kind(i.id, &mut name, &attrs, &mut vis, &i.node) }); hir::Item { id: i.id, - name: i.ident.name, - attrs: self.lower_attrs(&i.attrs), + name: name, + attrs: attrs, node: node, - vis: self.lower_visibility(&i.vis), + vis: vis, span: i.span, } } @@ -875,29 +1119,41 @@ impl<'a> LoweringContext<'a> { self.with_parent_def(p.id, |this| { match this.resolver.get_resolution(p.id).map(|d| d.base_def) { // `None` can occur in body-less function signatures - None | Some(Def::Local(..)) => { + def @ None | def @ Some(Def::Local(_)) => { + let def_id = def.map(|d| d.def_id()).unwrap_or_else(|| { + this.resolver.definitions().local_def_id(p.id) + }); hir::PatKind::Binding(this.lower_binding_mode(binding_mode), + def_id, respan(pth1.span, pth1.node.name), sub.as_ref().map(|x| this.lower_pat(x))) } - _ => hir::PatKind::Path(None, hir::Path::from_name(pth1.span, - pth1.node.name)) + Some(def) => { + 
hir::PatKind::Path(hir::QPath::Resolved(None, P(hir::Path { + span: pth1.span, + global: false, + def: def, + segments: hir_vec![ + hir::PathSegment::from_name(pth1.node.name) + ], + }))) + } } }) } PatKind::Lit(ref e) => hir::PatKind::Lit(P(self.lower_expr(e))), PatKind::TupleStruct(ref path, ref pats, ddpos) => { - hir::PatKind::TupleStruct(self.lower_path(path), - pats.iter().map(|x| self.lower_pat(x)).collect(), ddpos) + let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional); + hir::PatKind::TupleStruct(qpath, + pats.iter().map(|x| self.lower_pat(x)).collect(), + ddpos) } - PatKind::Path(ref opt_qself, ref path) => { - let opt_qself = opt_qself.as_ref().map(|qself| { - hir::QSelf { ty: self.lower_ty(&qself.ty), position: qself.position } - }); - hir::PatKind::Path(opt_qself, self.lower_path(path)) + PatKind::Path(ref qself, ref path) => { + hir::PatKind::Path(self.lower_qpath(p.id, qself, path, ParamMode::Optional)) } - PatKind::Struct(ref pth, ref fields, etc) => { - let pth = self.lower_path(pth); + PatKind::Struct(ref path, ref fields, etc) => { + let qpath = self.lower_qpath(p.id, &None, path, ParamMode::Optional); + let fs = fields.iter() .map(|f| { Spanned { @@ -910,7 +1166,7 @@ impl<'a> LoweringContext<'a> { } }) .collect(); - hir::PatKind::Struct(pth, fs, etc) + hir::PatKind::Struct(qpath, fs, etc) } PatKind::Tuple(ref elts, ddpos) => { hir::PatKind::Tuple(elts.iter().map(|x| self.lower_pat(x)).collect(), ddpos) @@ -980,9 +1236,9 @@ impl<'a> LoweringContext<'a> { let move_val_init = ["intrinsics", "move_val_init"]; let inplace_finalize = ["ops", "InPlace", "finalize"]; + let unstable_span = self.allow_internal_unstable("<-", e.span); let make_call = |this: &mut LoweringContext, p, args| { - let path = this.std_path(e.span, p); - let path = this.expr_path(path, ThinVec::new()); + let path = P(this.expr_std_path(unstable_span, p, ThinVec::new())); P(this.expr_call(e.span, path, args)) }; @@ -996,11 +1252,6 @@ impl<'a> LoweringContext<'a> { // let placer = ; let (s1, placer_binding) = { - let placer_expr = P(self.signal_block_expr(hir_vec![], - placer_expr, - e.span, - hir::PopUnstableBlock, - ThinVec::new())); mk_stmt_let(self, placer_ident, placer_expr) }; @@ -1021,11 +1272,6 @@ impl<'a> LoweringContext<'a> { // pop_unsafe!(EXPR)); let pop_unsafe_expr = { - let value_expr = P(self.signal_block_expr(hir_vec![], - value_expr, - e.span, - hir::PopUnstableBlock, - ThinVec::new())); self.signal_block_expr(hir_vec![], value_expr, e.span, @@ -1054,11 +1300,9 @@ impl<'a> LoweringContext<'a> { ThinVec::new())) }; - return self.signal_block_expr(hir_vec![s1, s2, s3], - expr, - e.span, - hir::PushUnstableBlock, - e.attrs.clone()); + let block = self.block_all(e.span, hir_vec![s1, s2, s3], Some(expr)); + // add the attributes to the outer returned expr node + return self.expr_block(P(block), e.attrs.clone()); } ExprKind::Vec(ref exprs) => { @@ -1147,9 +1391,10 @@ impl<'a> LoweringContext<'a> { } ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => { self.with_parent_def(e.id, |this| { + let expr = this.lower_expr(body); hir::ExprClosure(this.lower_capture_clause(capture_clause), this.lower_fn_decl(decl), - P(this.lower_expr(body)), + this.record_expr(expr), fn_decl_span) }) } @@ -1176,33 +1421,23 @@ impl<'a> LoweringContext<'a> { ast_expr: &Expr, path: &[&str], fields: &[(&str, &P)]) -> hir::Expr { - let struct_path = this.std_path(ast_expr.span, - &iter::once(&"ops").chain(path) - .map(|s| *s) - .collect::>()); + let struct_path = 
&iter::once(&"ops").chain(path).map(|s| *s) + .collect::>(); + let unstable_span = this.allow_internal_unstable("...", ast_expr.span); - let hir_expr = if fields.len() == 0 { - this.expr_path(struct_path, ast_expr.attrs.clone()) + if fields.len() == 0 { + this.expr_std_path(unstable_span, struct_path, + ast_expr.attrs.clone()) } else { let fields = fields.into_iter().map(|&(s, e)| { let expr = P(this.lower_expr(&e)); - let signal_block = P(this.signal_block_expr(hir_vec![], - expr, - e.span, - hir::PopUnstableBlock, - ThinVec::new())); - this.field(Symbol::intern(s), signal_block, ast_expr.span) + let unstable_span = this.allow_internal_unstable("...", e.span); + this.field(Symbol::intern(s), expr, unstable_span) }).collect(); let attrs = ast_expr.attrs.clone(); - this.expr_struct(ast_expr.span, struct_path, fields, None, attrs) - }; - - this.signal_block_expr(hir_vec![], - hir_expr, - ast_expr.span, - hir::PushUnstableBlock, - ThinVec::new()) + this.expr_std_struct(unstable_span, struct_path, fields, None, attrs) + } } use syntax::ast::RangeLimits::*; @@ -1236,19 +1471,13 @@ impl<'a> LoweringContext<'a> { }; } ExprKind::Path(ref qself, ref path) => { - let hir_qself = qself.as_ref().map(|&QSelf { ref ty, position }| { - hir::QSelf { - ty: self.lower_ty(ty), - position: position, - } - }); - hir::ExprPath(hir_qself, self.lower_path(path)) + hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional)) } ExprKind::Break(opt_ident, ref opt_expr) => { - hir::ExprBreak(self.lower_opt_sp_ident(opt_ident), + hir::ExprBreak(self.lower_label(e.id, opt_ident), opt_expr.as_ref().map(|x| P(self.lower_expr(x)))) } - ExprKind::Continue(opt_ident) => hir::ExprAgain(self.lower_opt_sp_ident(opt_ident)), + ExprKind::Continue(opt_ident) => hir::ExprAgain(self.lower_label(e.id, opt_ident)), ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))), ExprKind::InlineAsm(ref asm) => { let hir_asm = hir::InlineAsm { @@ -1275,7 +1504,7 @@ impl<'a> LoweringContext<'a> { hir::ExprInlineAsm(P(hir_asm), outputs, inputs) } ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { - hir::ExprStruct(P(self.lower_path(path)), + hir::ExprStruct(self.lower_qpath(e.id, &None, path, ParamMode::Optional), fields.iter().map(|x| self.lower_field(x)).collect(), maybe_expr.as_ref().map(|x| P(self.lower_expr(x)))) } @@ -1475,10 +1704,10 @@ impl<'a> LoweringContext<'a> { // `match ::std::iter::Iterator::next(&mut iter) { ... }` let match_expr = { - let next_path = self.std_path(e.span, &["iter", "Iterator", "next"]); let iter = P(self.expr_ident(e.span, iter, iter_pat.id)); let ref_mut_iter = self.expr_mut_addr_of(e.span, iter); - let next_path = self.expr_path(next_path, ThinVec::new()); + let next_path = &["iter", "Iterator", "next"]; + let next_path = P(self.expr_std_path(e.span, next_path, ThinVec::new())); let next_expr = P(self.expr_call(e.span, next_path, hir_vec![ref_mut_iter])); let arms = hir_vec![pat_arm, break_arm]; @@ -1505,10 +1734,9 @@ impl<'a> LoweringContext<'a> { // `match ::std::iter::IntoIterator::into_iter() { ... 
}` let into_iter_expr = { - let into_iter_path = self.std_path(e.span, - &["iter", "IntoIterator", "into_iter"]); - - let into_iter = self.expr_path(into_iter_path, ThinVec::new()); + let into_iter_path = &["iter", "IntoIterator", "into_iter"]; + let into_iter = P(self.expr_std_path(e.span, into_iter_path, + ThinVec::new())); P(self.expr_call(e.span, into_iter, hir_vec![head])) }; @@ -1534,32 +1762,20 @@ impl<'a> LoweringContext<'a> { ExprKind::Try(ref sub_expr) => { // to: // - // { - // match { Carrier::translate( { } ) } { - // Ok(val) => val, - // Err(err) => { return Carrier::from_error(From::from(err)); } - // } + // match Carrier::translate() { + // Ok(val) => val, + // Err(err) => return Carrier::from_error(From::from(err)) // } + let unstable_span = self.allow_internal_unstable("?", e.span); - // { Carrier::translate( { } ) } + // Carrier::translate() let discr = { // expand - let sub_expr = P(self.lower_expr(sub_expr)); - let sub_expr = self.signal_block_expr(hir_vec![], - sub_expr, - e.span, - hir::PopUnstableBlock, - ThinVec::new()); + let sub_expr = self.lower_expr(sub_expr); - let path = self.std_path(e.span, &["ops", "Carrier", "translate"]); - let path = self.expr_path(path, ThinVec::new()); - let call = P(self.expr_call(e.span, path, hir_vec![sub_expr])); - - P(self.signal_block_expr(hir_vec![], - call, - e.span, - hir::PushUnstableBlock, - ThinVec::new())) + let path = &["ops", "Carrier", "translate"]; + let path = P(self.expr_std_path(unstable_span, path, ThinVec::new())); + P(self.expr_call(e.span, path, hir_vec![sub_expr])) }; // Ok(val) => val @@ -1572,33 +1788,30 @@ impl<'a> LoweringContext<'a> { self.arm(hir_vec![ok_pat], val_expr) }; - // Err(err) => { return Carrier::from_error(From::from(err)); } + // Err(err) => return Carrier::from_error(From::from(err)) let err_arm = { let err_ident = self.str_to_ident("err"); let err_local = self.pat_ident(e.span, err_ident); let from_expr = { - let path = self.std_path(e.span, &["convert", "From", "from"]); - let from = self.expr_path(path, ThinVec::new()); + let path = &["convert", "From", "from"]; + let from = P(self.expr_std_path(e.span, path, ThinVec::new())); let err_expr = self.expr_ident(e.span, err_ident, err_local.id); self.expr_call(e.span, from, hir_vec![err_expr]) }; let from_err_expr = { - let path = self.std_path(e.span, &["ops", "Carrier", "from_error"]); - let from_err = self.expr_path(path, ThinVec::new()); + let path = &["ops", "Carrier", "from_error"]; + let from_err = P(self.expr_std_path(unstable_span, path, + ThinVec::new())); P(self.expr_call(e.span, from_err, hir_vec![from_expr])) }; let ret_expr = P(self.expr(e.span, hir::Expr_::ExprRet(Some(from_err_expr)), ThinVec::new())); - let ret_stmt = self.stmt_expr(ret_expr); - let block = P(self.signal_block_stmt(ret_stmt, e.span, - hir::PushUnstableBlock, - ThinVec::new())); let err_pat = self.pat_err(e.span, err_local); - self.arm(hir_vec![err_pat], block) + self.arm(hir_vec![err_pat], ret_expr) }; return self.expr_match(e.span, discr, hir_vec![err_arm, ok_arm], @@ -1612,8 +1825,8 @@ impl<'a> LoweringContext<'a> { } } - fn lower_stmt(&mut self, s: &Stmt) -> hir::Stmt { - match s.node { + fn lower_stmt(&mut self, s: &Stmt) -> SmallVector { + SmallVector::one(match s.node { StmtKind::Local(ref l) => Spanned { node: hir::StmtDecl(P(Spanned { node: hir::DeclLocal(self.lower_local(l)), @@ -1621,13 +1834,17 @@ impl<'a> LoweringContext<'a> { }), s.id), span: s.span, }, - StmtKind::Item(ref it) => Spanned { - node: hir::StmtDecl(P(Spanned { - node: 
hir::DeclItem(self.lower_item_id(it)), + StmtKind::Item(ref it) => { + // Can only use the ID once. + let mut id = Some(s.id); + return self.lower_item_id(it).into_iter().map(|item_id| Spanned { + node: hir::StmtDecl(P(Spanned { + node: hir::DeclItem(item_id), + span: s.span, + }), id.take().unwrap_or_else(|| self.next_id())), span: s.span, - }), s.id), - span: s.span, - }, + }).collect(); + } StmtKind::Expr(ref e) => { Spanned { node: hir::StmtExpr(P(self.lower_expr(e)), s.id), @@ -1641,7 +1858,7 @@ impl<'a> LoweringContext<'a> { } } StmtKind::Mac(..) => panic!("Shouldn't exist here"), - } + }) } fn lower_capture_clause(&mut self, c: CaptureBy) -> hir::CaptureClause { @@ -1655,8 +1872,12 @@ impl<'a> LoweringContext<'a> { match *v { Visibility::Public => hir::Public, Visibility::Crate(_) => hir::Visibility::Crate, - Visibility::Restricted { ref path, id } => - hir::Visibility::Restricted { path: P(self.lower_path(path)), id: id }, + Visibility::Restricted { ref path, id } => { + hir::Visibility::Restricted { + path: P(self.lower_path(id, path, ParamMode::Explicit)), + id: id + } + } Visibility::Inherited => hir::Inherited, } } @@ -1739,27 +1960,32 @@ impl<'a> LoweringContext<'a> { } fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr { - let expr_path = hir::ExprPath(None, self.path_ident(span, id)); - let expr = self.expr(span, expr_path, ThinVec::new()); - let def = { let defs = self.resolver.definitions(); Def::Local(defs.local_def_id(binding)) }; - self.resolver.record_resolution(expr.id, def); - expr + let expr_path = hir::ExprPath(hir::QPath::Resolved(None, P(hir::Path { + span: span, + global: false, + def: def, + segments: hir_vec![hir::PathSegment::from_name(id)], + }))); + + self.expr(span, expr_path, ThinVec::new()) } fn expr_mut_addr_of(&mut self, span: Span, e: P) -> hir::Expr { self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), ThinVec::new()) } - fn expr_path(&mut self, path: hir::Path, attrs: ThinVec) -> P { - let def = self.resolver.resolve_generated_global_path(&path, true); - let expr = P(self.expr(path.span, hir::ExprPath(None, path), attrs)); - self.resolver.record_resolution(expr.id, def); - expr + fn expr_std_path(&mut self, + span: Span, + components: &[&str], + attrs: ThinVec) + -> hir::Expr { + let path = self.std_path(span, components, true); + self.expr(span, hir::ExprPath(hir::QPath::Resolved(None, P(path))), attrs) } fn expr_match(&mut self, @@ -1779,16 +2005,15 @@ impl<'a> LoweringContext<'a> { P(self.expr(sp, hir::ExprTup(exprs), ThinVec::new())) } - fn expr_struct(&mut self, - sp: Span, - path: hir::Path, - fields: hir::HirVec, - e: Option>, - attrs: ThinVec) -> P { - let def = self.resolver.resolve_generated_global_path(&path, false); - let expr = P(self.expr(sp, hir::ExprStruct(P(path), fields, e), attrs)); - self.resolver.record_resolution(expr.id, def); - expr + fn expr_std_struct(&mut self, + span: Span, + components: &[&str], + fields: hir::HirVec, + e: Option>, + attrs: ThinVec) -> hir::Expr { + let path = self.std_path(span, components, false); + let qpath = hir::QPath::Resolved(None, P(path)); + self.expr(span, hir::ExprStruct(qpath, fields, e), attrs) } fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinVec) -> hir::Expr { @@ -1820,15 +2045,6 @@ impl<'a> LoweringContext<'a> { (respan(sp, hir::StmtDecl(P(decl), self.next_id())), pat_id) } - // Turns `` into `;`, note that this produces a StmtSemi, not a - // StmtExpr. 
- fn stmt_expr(&self, expr: P) -> hir::Stmt { - hir::Stmt { - span: expr.span, - node: hir::StmtSemi(expr, self.next_id()), - } - } - fn block_expr(&mut self, expr: P) -> hir::Block { self.block_all(expr.span, hir::HirVec::new(), Some(expr)) } @@ -1845,36 +2061,34 @@ impl<'a> LoweringContext<'a> { } fn pat_ok(&mut self, span: Span, pat: P) -> P { - let path = self.std_path(span, &["result", "Result", "Ok"]); - self.pat_enum(span, path, hir_vec![pat]) + self.pat_std_enum(span, &["result", "Result", "Ok"], hir_vec![pat]) } fn pat_err(&mut self, span: Span, pat: P) -> P { - let path = self.std_path(span, &["result", "Result", "Err"]); - self.pat_enum(span, path, hir_vec![pat]) + self.pat_std_enum(span, &["result", "Result", "Err"], hir_vec![pat]) } fn pat_some(&mut self, span: Span, pat: P) -> P { - let path = self.std_path(span, &["option", "Option", "Some"]); - self.pat_enum(span, path, hir_vec![pat]) + self.pat_std_enum(span, &["option", "Option", "Some"], hir_vec![pat]) } fn pat_none(&mut self, span: Span) -> P { - let path = self.std_path(span, &["option", "Option", "None"]); - self.pat_enum(span, path, hir_vec![]) + self.pat_std_enum(span, &["option", "Option", "None"], hir_vec![]) } - fn pat_enum(&mut self, span: Span, path: hir::Path, subpats: hir::HirVec>) - -> P { - let def = self.resolver.resolve_generated_global_path(&path, true); + fn pat_std_enum(&mut self, + span: Span, + components: &[&str], + subpats: hir::HirVec>) + -> P { + let path = self.std_path(span, components, true); + let qpath = hir::QPath::Resolved(None, P(path)); let pt = if subpats.is_empty() { - hir::PatKind::Path(None, path) + hir::PatKind::Path(qpath) } else { - hir::PatKind::TupleStruct(path, subpats, None) + hir::PatKind::TupleStruct(qpath, subpats, None) }; - let pat = self.pat(span, pt); - self.resolver.record_resolution(pat.id, def); - pat + self.pat(span, pt) } fn pat_ident(&mut self, span: Span, name: Name) -> P { @@ -1883,25 +2097,26 @@ impl<'a> LoweringContext<'a> { fn pat_ident_binding_mode(&mut self, span: Span, name: Name, bm: hir::BindingMode) -> P { - let pat_ident = hir::PatKind::Binding(bm, - Spanned { - span: span, - node: name, - }, - None); - - let pat = self.pat(span, pat_ident); - + let id = self.next_id(); let parent_def = self.parent_def; - let def = { + let def_id = { let defs = self.resolver.definitions(); let def_path_data = DefPathData::Binding(name.as_str()); - let def_index = defs.create_def_with_parent(parent_def, pat.id, def_path_data); - Def::Local(DefId::local(def_index)) + let def_index = defs.create_def_with_parent(parent_def, id, def_path_data); + DefId::local(def_index) }; - self.resolver.record_resolution(pat.id, def); - pat + P(hir::Pat { + id: id, + node: hir::PatKind::Binding(bm, + def_id, + Spanned { + span: span, + node: name, + }, + None), + span: span, + }) } fn pat_wild(&mut self, span: Span) -> P { @@ -1916,63 +2131,25 @@ impl<'a> LoweringContext<'a> { }) } - fn path_ident(&mut self, span: Span, id: Name) -> hir::Path { - self.path(span, vec![id]) - } + /// Given suffix ["b","c","d"], returns path `::std::b::c::d` when + /// `fld.cx.use_std`, and `::core::b::c::d` otherwise. + /// The path is also resolved according to `is_value`. 
+ fn std_path(&mut self, span: Span, components: &[&str], is_value: bool) -> hir::Path { + let idents = self.crate_root.iter().chain(components); - fn path(&mut self, span: Span, strs: Vec) -> hir::Path { - self.path_all(span, false, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new()) - } - - fn path_global(&mut self, span: Span, strs: Vec) -> hir::Path { - self.path_all(span, true, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new()) - } - - fn path_all(&mut self, - sp: Span, - global: bool, - mut names: Vec, - lifetimes: hir::HirVec, - types: hir::HirVec>, - bindings: hir::HirVec) - -> hir::Path { - let last_identifier = names.pop().unwrap(); - let mut segments: Vec = names.into_iter().map(|name| { - hir::PathSegment { - name: name, - parameters: hir::PathParameters::none(), - } + let segments: Vec<_> = idents.map(|name| { + hir::PathSegment::from_name(Symbol::intern(name)) }).collect(); - segments.push(hir::PathSegment { - name: last_identifier, - parameters: hir::AngleBracketedParameters(hir::AngleBracketedParameterData { - lifetimes: lifetimes, - types: types, - bindings: bindings, - }), - }); - hir::Path { - span: sp, - global: global, + let mut path = hir::Path { + span: span, + global: true, + def: Def::Err, segments: segments.into(), - } - } + }; - fn std_path_components(&mut self, components: &[&str]) -> Vec { - let mut v = Vec::new(); - if let Some(s) = self.crate_root { - v.push(Symbol::intern(s)); - } - v.extend(components.iter().map(|s| Symbol::intern(s))); - return v; - } - - // Given suffix ["b","c","d"], returns path `::std::b::c::d` when - // `fld.cx.use_std`, and `::core::b::c::d` otherwise. - fn std_path(&mut self, span: Span, components: &[&str]) -> hir::Path { - let idents = self.std_path_components(components); - self.path_global(span, idents) + self.resolver.resolve_hir_path(&mut path, is_value); + path } fn signal_block_expr(&mut self, @@ -1993,20 +2170,11 @@ impl<'a> LoweringContext<'a> { self.expr_block(block, attrs) } - fn signal_block_stmt(&mut self, - stmt: hir::Stmt, - span: Span, - rule: hir::BlockCheckMode, - attrs: ThinVec) - -> hir::Expr { - let id = self.next_id(); - let block = P(hir::Block { - rules: rule, + fn ty(&mut self, span: Span, node: hir::Ty_) -> P { + P(hir::Ty { + id: self.next_id(), + node: node, span: span, - id: id, - stmts: hir_vec![stmt], - expr: None, - }); - self.expr_block(block, attrs) + }) } } diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs index 325a90ea91e..068e7ed8624 100644 --- a/src/librustc/hir/map/blocks.rs +++ b/src/librustc/hir/map/blocks.rs @@ -48,7 +48,7 @@ pub trait MaybeFnLike { fn is_fn_like(&self) -> bool; } /// Components shared by fn-like things (fn items, methods, closures). pub struct FnParts<'a> { pub decl: &'a FnDecl, - pub body: &'a Expr, + pub body: ast::ExprId, pub kind: FnKind<'a>, pub span: Span, pub id: NodeId, @@ -115,7 +115,7 @@ struct ItemFnParts<'a> { abi: abi::Abi, vis: &'a ast::Visibility, generics: &'a ast::Generics, - body: &'a Expr, + body: ast::ExprId, id: NodeId, span: Span, attrs: &'a [Attribute], @@ -125,14 +125,14 @@ struct ItemFnParts<'a> { /// for use when implementing FnLikeNode operations. 
struct ClosureParts<'a> { decl: &'a FnDecl, - body: &'a Expr, + body: ast::ExprId, id: NodeId, span: Span, attrs: &'a [Attribute], } impl<'a> ClosureParts<'a> { - fn new(d: &'a FnDecl, b: &'a Expr, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self { + fn new(d: &'a FnDecl, b: ast::ExprId, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self { ClosureParts { decl: d, body: b, @@ -172,9 +172,9 @@ impl<'a> FnLikeNode<'a> { } } - pub fn body(self) -> &'a Expr { - self.handle(|i: ItemFnParts<'a>| &*i.body, - |_, _, _: &'a ast::MethodSig, _, body: &'a ast::Expr, _, _| body, + pub fn body(self) -> ast::ExprId { + self.handle(|i: ItemFnParts<'a>| i.body, + |_, _, _: &'a ast::MethodSig, _, body: ast::ExprId, _, _| body, |c: ClosureParts<'a>| c.body) } @@ -196,6 +196,18 @@ impl<'a> FnLikeNode<'a> { |c: ClosureParts| c.id) } + pub fn constness(self) -> ast::Constness { + match self.kind() { + FnKind::ItemFn(_, _, _, constness, ..) => { + constness + } + FnKind::Method(_, m, ..) => { + m.constness + } + _ => ast::Constness::NotConst + } + } + pub fn kind(self) -> FnKind<'a> { let item = |p: ItemFnParts<'a>| -> FnKind<'a> { FnKind::ItemFn(p.name, p.generics, p.unsafety, p.constness, p.abi, p.vis, p.attrs) @@ -215,7 +227,7 @@ impl<'a> FnLikeNode<'a> { Name, &'a ast::MethodSig, Option<&'a ast::Visibility>, - &'a ast::Expr, + ast::ExprId, Span, &'a [Attribute]) -> A, @@ -223,13 +235,13 @@ impl<'a> FnLikeNode<'a> { { match self.node { map::NodeItem(i) => match i.node { - ast::ItemFn(ref decl, unsafety, constness, abi, ref generics, ref block) => + ast::ItemFn(ref decl, unsafety, constness, abi, ref generics, block) => item_fn(ItemFnParts { id: i.id, name: i.name, decl: &decl, unsafety: unsafety, - body: &block, + body: block, generics: generics, abi: abi, vis: &i.vis, @@ -240,24 +252,24 @@ impl<'a> FnLikeNode<'a> { _ => bug!("item FnLikeNode that is not fn-like"), }, map::NodeTraitItem(ti) => match ti.node { - ast::MethodTraitItem(ref sig, Some(ref body)) => { + ast::MethodTraitItem(ref sig, Some(body)) => { method(ti.id, ti.name, sig, None, body, ti.span, &ti.attrs) } _ => bug!("trait method FnLikeNode that is not fn-like"), }, map::NodeImplItem(ii) => { match ii.node { - ast::ImplItemKind::Method(ref sig, ref body) => { + ast::ImplItemKind::Method(ref sig, body) => { method(ii.id, ii.name, sig, Some(&ii.vis), body, ii.span, &ii.attrs) } _ => { bug!("impl method FnLikeNode that is not fn-like") } } - } + }, map::NodeExpr(e) => match e.node { - ast::ExprClosure(_, ref decl, ref block, _fn_decl_span) => - closure(ClosureParts::new(&decl, &block, e.id, e.span, &e.attrs)), + ast::ExprClosure(_, ref decl, block, _fn_decl_span) => + closure(ClosureParts::new(&decl, block, e.id, e.span, &e.attrs)), _ => bug!("expr FnLikeNode that is not fn-like"), }, _ => bug!("other FnLikeNode that is not fn-like"), diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index 89217e83ca2..c46c8f044e0 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -10,7 +10,7 @@ use super::*; -use hir::intravisit::Visitor; +use hir::intravisit::{Visitor, NestedVisitorMap}; use hir::def_id::DefId; use middle::cstore::InlinedItem; use std::iter::repeat; @@ -91,7 +91,7 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { /// deep walking so that we walk nested items in the context of /// their outer items. 
- fn nested_visit_map(&mut self) -> Option<&map::Map<'ast>> { + fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> { panic!("visit_nested_xxx must be manually implemented in this visitor") } @@ -106,6 +106,10 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { self.visit_impl_item(self.krate.impl_item(item_id)) } + fn visit_body(&mut self, id: ExprId) { + self.visit_expr(self.krate.expr(id)) + } + fn visit_item(&mut self, i: &'ast Item) { debug!("visit_item: {:?}", i); @@ -124,23 +128,6 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { this.insert(struct_def.id(), NodeStructCtor(struct_def)); } } - ItemTrait(.., ref bounds, _) => { - for b in bounds.iter() { - if let TraitTyParamBound(ref t, TraitBoundModifier::None) = *b { - this.insert(t.trait_ref.ref_id, NodeItem(i)); - } - } - } - ItemUse(ref view_path) => { - match view_path.node { - ViewPathList(_, ref paths) => { - for path in paths { - this.insert(path.node.id, NodeItem(i)); - } - } - _ => () - } - } _ => {} } intravisit::walk_item(this, i); @@ -217,8 +204,16 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { }); } + fn visit_trait_ref(&mut self, tr: &'ast TraitRef) { + self.insert(tr.ref_id, NodeTraitRef(tr)); + + self.with_parent(tr.ref_id, |this| { + intravisit::walk_trait_ref(this, tr); + }); + } + fn visit_fn(&mut self, fk: intravisit::FnKind<'ast>, fd: &'ast FnDecl, - b: &'ast Expr, s: Span, id: NodeId) { + b: ExprId, s: Span, id: NodeId) { assert_eq!(self.parent_node, id); intravisit::walk_fn(self, fk, fd, b, s, id); } @@ -234,7 +229,28 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { self.insert(lifetime.id, NodeLifetime(lifetime)); } + fn visit_vis(&mut self, visibility: &'ast Visibility) { + match *visibility { + Visibility::Public | + Visibility::Crate | + Visibility::Inherited => {} + Visibility::Restricted { id, .. } => { + self.insert(id, NodeVisibility(visibility)); + self.with_parent(id, |this| { + intravisit::walk_vis(this, visibility); + }); + } + } + } + fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) { self.insert_entry(macro_def.id, NotPresent); } + + fn visit_struct_field(&mut self, field: &'ast StructField) { + self.insert(field.id, NodeField(field)); + self.with_parent(field.id, |this| { + intravisit::walk_struct_field(this, field); + }); + } } diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 7486d954c48..eb5a89f320e 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -11,7 +11,7 @@ use hir::map::definitions::*; use hir; -use hir::intravisit; +use hir::intravisit::{self, Visitor, NestedVisitorMap}; use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; use middle::cstore::InlinedItem; @@ -135,8 +135,8 @@ impl<'a> DefCollector<'a> { } } -impl<'a> visit::Visitor for DefCollector<'a> { - fn visit_item(&mut self, i: &Item) { +impl<'a> visit::Visitor<'a> for DefCollector<'a> { + fn visit_item(&mut self, i: &'a Item) { debug!("visit_item: {:?}", i); // Pick the def data. This need not be unique, but the more @@ -155,7 +155,20 @@ impl<'a> visit::Visitor for DefCollector<'a> { DefPathData::ValueNs(i.ident.name.as_str()), ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false), - ItemKind::Use(..) => DefPathData::Misc, + ItemKind::Use(ref view_path) => { + match view_path.node { + ViewPathGlob(..) => {} + + // FIXME(eddyb) Should use the real name. Which namespace? + ViewPathSimple(..) 
=> {} + ViewPathList(_, ref imports) => { + for import in imports { + self.create_def(import.node.id, DefPathData::Misc); + } + } + } + DefPathData::Misc + } }; let def = self.create_def(i.id, def_data); @@ -198,7 +211,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { }); } - fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) { + fn visit_foreign_item(&mut self, foreign_item: &'a ForeignItem) { let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.ident.name.as_str())); @@ -207,7 +220,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { }); } - fn visit_generics(&mut self, generics: &Generics) { + fn visit_generics(&mut self, generics: &'a Generics) { for ty_param in generics.ty_params.iter() { self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name.as_str())); } @@ -215,7 +228,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { visit::walk_generics(self, generics); } - fn visit_trait_item(&mut self, ti: &TraitItem) { + fn visit_trait_item(&mut self, ti: &'a TraitItem) { let def_data = match ti.node { TraitItemKind::Method(..) | TraitItemKind::Const(..) => DefPathData::ValueNs(ti.ident.name.as_str()), @@ -233,7 +246,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { }); } - fn visit_impl_item(&mut self, ii: &ImplItem) { + fn visit_impl_item(&mut self, ii: &'a ImplItem) { let def_data = match ii.node { ImplItemKind::Method(..) | ImplItemKind::Const(..) => DefPathData::ValueNs(ii.ident.name.as_str()), @@ -251,7 +264,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { }); } - fn visit_pat(&mut self, pat: &Pat) { + fn visit_pat(&mut self, pat: &'a Pat) { let parent_def = self.parent_def; match pat.node { @@ -267,7 +280,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { self.parent_def = parent_def; } - fn visit_expr(&mut self, expr: &Expr) { + fn visit_expr(&mut self, expr: &'a Expr) { let parent_def = self.parent_def; match expr.node { @@ -284,7 +297,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { self.parent_def = parent_def; } - fn visit_ty(&mut self, ty: &Ty) { + fn visit_ty(&mut self, ty: &'a Ty) { match ty.node { TyKind::Mac(..) => return self.visit_macro_invoc(ty.id, false), TyKind::Array(_, ref length) => self.visit_ast_const_integer(length), @@ -296,15 +309,15 @@ impl<'a> visit::Visitor for DefCollector<'a> { visit::walk_ty(self, ty); } - fn visit_lifetime_def(&mut self, def: &LifetimeDef) { + fn visit_lifetime_def(&mut self, def: &'a LifetimeDef) { self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str())); } - fn visit_macro_def(&mut self, macro_def: &MacroDef) { + fn visit_macro_def(&mut self, macro_def: &'a MacroDef) { self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str())); } - fn visit_stmt(&mut self, stmt: &Stmt) { + fn visit_stmt(&mut self, stmt: &'a Stmt) { match stmt.node { StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false), _ => visit::walk_stmt(self, stmt), @@ -313,7 +326,18 @@ impl<'a> visit::Visitor for DefCollector<'a> { } // We walk the HIR rather than the AST when reading items from metadata. 
-impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> { +impl<'ast> Visitor<'ast> for DefCollector<'ast> { + fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> { + // note however that we override `visit_body` below + NestedVisitorMap::None + } + + fn visit_body(&mut self, id: hir::ExprId) { + if let Some(krate) = self.hir_crate { + self.visit_expr(krate.expr(id)); + } + } + fn visit_item(&mut self, i: &'ast hir::Item) { debug!("visit_item: {:?}", i); @@ -423,7 +447,7 @@ impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> { fn visit_pat(&mut self, pat: &'ast hir::Pat) { let parent_def = self.parent_def; - if let hir::PatKind::Binding(_, name, _) = pat.node { + if let hir::PatKind::Binding(_, _, name, _) = pat.node { let def = self.create_def(pat.id, DefPathData::Binding(name.node.as_str())); self.parent_def = Some(def); } diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index a90577b3426..117edcf14a1 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -18,7 +18,6 @@ pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData, use dep_graph::{DepGraph, DepNode}; use middle::cstore::InlinedItem; -use middle::cstore::InlinedItem as II; use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; use syntax::abi::Abi; @@ -46,9 +45,11 @@ pub enum Node<'ast> { NodeTraitItem(&'ast TraitItem), NodeImplItem(&'ast ImplItem), NodeVariant(&'ast Variant), + NodeField(&'ast StructField), NodeExpr(&'ast Expr), NodeStmt(&'ast Stmt), NodeTy(&'ast Ty), + NodeTraitRef(&'ast TraitRef), NodeLocal(&'ast Pat), NodePat(&'ast Pat), NodeBlock(&'ast Block), @@ -57,7 +58,10 @@ pub enum Node<'ast> { NodeStructCtor(&'ast VariantData), NodeLifetime(&'ast Lifetime), - NodeTyParam(&'ast TyParam) + NodeTyParam(&'ast TyParam), + NodeVisibility(&'ast Visibility), + + NodeInlinedItem(&'ast InlinedItem), } /// Represents an entry and its parent NodeID. @@ -73,15 +77,18 @@ pub enum MapEntry<'ast> { EntryTraitItem(NodeId, &'ast TraitItem), EntryImplItem(NodeId, &'ast ImplItem), EntryVariant(NodeId, &'ast Variant), + EntryField(NodeId, &'ast StructField), EntryExpr(NodeId, &'ast Expr), EntryStmt(NodeId, &'ast Stmt), EntryTy(NodeId, &'ast Ty), + EntryTraitRef(NodeId, &'ast TraitRef), EntryLocal(NodeId, &'ast Pat), EntryPat(NodeId, &'ast Pat), EntryBlock(NodeId, &'ast Block), EntryStructCtor(NodeId, &'ast VariantData), EntryLifetime(NodeId, &'ast Lifetime), EntryTyParam(NodeId, &'ast TyParam), + EntryVisibility(NodeId, &'ast Visibility), /// Roots for node trees. 
RootCrate, @@ -102,15 +109,20 @@ impl<'ast> MapEntry<'ast> { NodeTraitItem(n) => EntryTraitItem(p, n), NodeImplItem(n) => EntryImplItem(p, n), NodeVariant(n) => EntryVariant(p, n), + NodeField(n) => EntryField(p, n), NodeExpr(n) => EntryExpr(p, n), NodeStmt(n) => EntryStmt(p, n), NodeTy(n) => EntryTy(p, n), + NodeTraitRef(n) => EntryTraitRef(p, n), NodeLocal(n) => EntryLocal(p, n), NodePat(n) => EntryPat(p, n), NodeBlock(n) => EntryBlock(p, n), NodeStructCtor(n) => EntryStructCtor(p, n), NodeLifetime(n) => EntryLifetime(p, n), NodeTyParam(n) => EntryTyParam(p, n), + NodeVisibility(n) => EntryVisibility(p, n), + + NodeInlinedItem(n) => RootInlinedParent(n), } } @@ -121,15 +133,18 @@ impl<'ast> MapEntry<'ast> { EntryTraitItem(id, _) => id, EntryImplItem(id, _) => id, EntryVariant(id, _) => id, + EntryField(id, _) => id, EntryExpr(id, _) => id, EntryStmt(id, _) => id, EntryTy(id, _) => id, + EntryTraitRef(id, _) => id, EntryLocal(id, _) => id, EntryPat(id, _) => id, EntryBlock(id, _) => id, EntryStructCtor(id, _) => id, EntryLifetime(id, _) => id, EntryTyParam(id, _) => id, + EntryVisibility(id, _) => id, NotPresent | RootCrate | @@ -144,15 +159,19 @@ impl<'ast> MapEntry<'ast> { EntryTraitItem(_, n) => NodeTraitItem(n), EntryImplItem(_, n) => NodeImplItem(n), EntryVariant(_, n) => NodeVariant(n), + EntryField(_, n) => NodeField(n), EntryExpr(_, n) => NodeExpr(n), EntryStmt(_, n) => NodeStmt(n), EntryTy(_, n) => NodeTy(n), + EntryTraitRef(_, n) => NodeTraitRef(n), EntryLocal(_, n) => NodeLocal(n), EntryPat(_, n) => NodePat(n), EntryBlock(_, n) => NodeBlock(n), EntryStructCtor(_, n) => NodeStructCtor(n), EntryLifetime(_, n) => NodeLifetime(n), EntryTyParam(_, n) => NodeTyParam(n), + EntryVisibility(_, n) => NodeVisibility(n), + RootInlinedParent(n) => NodeInlinedItem(n), _ => return None }) } @@ -237,45 +256,63 @@ impl<'ast> Map<'ast> { let map = self.map.borrow(); let mut id = id0; if !self.is_inlined_node_id(id) { + let mut last_expr = None; loop { match map[id.as_usize()] { EntryItem(_, item) => { - let def_id = self.local_def_id(item.id); - // NB ^~~~~~~ - // - // You would expect that `item.id == id`, but this - // is not always the case. In particular, for a - // ViewPath item like `use self::{mem, foo}`, we - // map the ids for `mem` and `foo` to the - // enclosing view path item. This seems mega super - // ultra wrong, but then who am I to judge? - // -nmatsakis + assert_eq!(id, item.id); + let def_id = self.local_def_id(id); assert!(!self.is_inlined_def_id(def_id)); + + if let Some(last_id) = last_expr { + // The body of the item may have a separate dep node + // (Note that trait items don't currently have + // their own dep node, so there's also just one + // HirBody node for all the items) + if self.is_body(last_id, item) { + return DepNode::HirBody(def_id); + } + } return DepNode::Hir(def_id); } - EntryImplItem(..) 
=> { + EntryImplItem(_, item) => { let def_id = self.local_def_id(id); assert!(!self.is_inlined_def_id(def_id)); + + if let Some(last_id) = last_expr { + // The body of the item may have a separate dep node + if self.is_impl_item_body(last_id, item) { + return DepNode::HirBody(def_id); + } + } return DepNode::Hir(def_id); } EntryForeignItem(p, _) | EntryTraitItem(p, _) | EntryVariant(p, _) | - EntryExpr(p, _) | + EntryField(p, _) | EntryStmt(p, _) | EntryTy(p, _) | + EntryTraitRef(p, _) | EntryLocal(p, _) | EntryPat(p, _) | EntryBlock(p, _) | EntryStructCtor(p, _) | EntryLifetime(p, _) | - EntryTyParam(p, _) => + EntryTyParam(p, _) | + EntryVisibility(p, _) => id = p, - RootCrate => - return DepNode::Krate, + EntryExpr(p, _) => { + last_expr = Some(id); + id = p; + } + + RootCrate => { + return DepNode::Hir(DefId::local(CRATE_DEF_INDEX)); + } RootInlinedParent(_) => bug!("node {} has inlined ancestor but is not inlined", id0), @@ -304,23 +341,22 @@ impl<'ast> Map<'ast> { EntryTraitItem(p, _) | EntryImplItem(p, _) | EntryVariant(p, _) | + EntryField(p, _) | EntryExpr(p, _) | EntryStmt(p, _) | EntryTy(p, _) | + EntryTraitRef(p, _) | EntryLocal(p, _) | EntryPat(p, _) | EntryBlock(p, _) | EntryStructCtor(p, _) | EntryLifetime(p, _) | - EntryTyParam(p, _) => + EntryTyParam(p, _) | + EntryVisibility(p, _) => id = p, - RootInlinedParent(parent) => match *parent { - InlinedItem::Item(def_id, _) | - InlinedItem::TraitItem(def_id, _) | - InlinedItem::ImplItem(def_id, _) => - return DepNode::MetaData(def_id) - }, + RootInlinedParent(parent) => + return DepNode::MetaData(parent.def_id), RootCrate => bug!("node {} has crate ancestor but is inlined", id0), @@ -332,6 +368,29 @@ impl<'ast> Map<'ast> { } } + fn is_body(&self, node_id: NodeId, item: &Item) -> bool { + match item.node { + ItemFn(_, _, _, _, _, body) => body.node_id() == node_id, + // Since trait items currently don't get their own dep nodes, + // we check here whether node_id is the body of any of the items. + // If they get their own dep nodes, this can go away + ItemTrait(_, _, _, ref trait_items) => { + trait_items.iter().any(|trait_item| { match trait_item.node { + MethodTraitItem(_, Some(body)) => body.node_id() == node_id, + _ => false + }}) + } + _ => false + } + } + + fn is_impl_item_body(&self, node_id: NodeId, item: &ImplItem) -> bool { + match item.node { + ImplItemKind::Method(_, body) => body.node_id() == node_id, + _ => false + } + } + pub fn num_local_def_ids(&self) -> usize { self.definitions.borrow().len() } @@ -543,8 +602,7 @@ impl<'ast> Map<'ast> { pub fn get_parent_did(&self, id: NodeId) -> DefId { let parent = self.get_parent(id); match self.find_entry(parent) { - Some(RootInlinedParent(&II::TraitItem(did, _))) | - Some(RootInlinedParent(&II::ImplItem(did, _))) => did, + Some(RootInlinedParent(ii)) => ii.def_id, _ => self.local_def_id(parent) } } @@ -642,6 +700,10 @@ impl<'ast> Map<'ast> { } } + pub fn expr(&self, id: ExprId) -> &'ast Expr { + self.expect_expr(id.node_id()) + } + /// Returns the name associated with the given NodeId's AST. pub fn name(&self, id: NodeId) -> Name { match self.get(id) { @@ -650,9 +712,10 @@ impl<'ast> Map<'ast> { NodeImplItem(ii) => ii.name, NodeTraitItem(ti) => ti.name, NodeVariant(v) => v.node.name, + NodeField(f) => f.name, NodeLifetime(lt) => lt.name, NodeTyParam(tp) => tp.name, - NodeLocal(&Pat { node: PatKind::Binding(_,l,_), .. }) => l.node, + NodeLocal(&Pat { node: PatKind::Binding(_,_,l,_), .. 
}) => l.node, NodeStructCtor(_) => self.name(self.get_parent(id)), _ => bug!("no name for {}", self.node_to_string(id)) } @@ -668,6 +731,7 @@ impl<'ast> Map<'ast> { Some(NodeTraitItem(ref ti)) => Some(&ti.attrs[..]), Some(NodeImplItem(ref ii)) => Some(&ii.attrs[..]), Some(NodeVariant(ref v)) => Some(&v.node.attrs[..]), + Some(NodeField(ref f)) => Some(&f.attrs[..]), Some(NodeExpr(ref e)) => Some(&*e.attrs), Some(NodeStmt(ref s)) => Some(s.node.attrs()), // unit/tuple structs take the attributes straight from @@ -697,44 +761,40 @@ impl<'ast> Map<'ast> { } } - pub fn opt_span(&self, id: NodeId) -> Option { - let sp = match self.find(id) { - Some(NodeItem(item)) => item.span, - Some(NodeForeignItem(foreign_item)) => foreign_item.span, - Some(NodeTraitItem(trait_method)) => trait_method.span, - Some(NodeImplItem(ref impl_item)) => impl_item.span, - Some(NodeVariant(variant)) => variant.span, - Some(NodeExpr(expr)) => expr.span, - Some(NodeStmt(stmt)) => stmt.span, - Some(NodeTy(ty)) => ty.span, - Some(NodeLocal(pat)) => pat.span, - Some(NodePat(pat)) => pat.span, - Some(NodeBlock(block)) => block.span, - Some(NodeStructCtor(_)) => self.expect_item(self.get_parent(id)).span, - Some(NodeTyParam(ty_param)) => ty_param.span, - _ => return None, - }; - Some(sp) - } - pub fn span(&self, id: NodeId) -> Span { self.read(id); // reveals span from node - self.opt_span(id) - .unwrap_or_else(|| bug!("AstMap.span: could not find span for id {:?}", id)) + match self.find_entry(id) { + Some(EntryItem(_, item)) => item.span, + Some(EntryForeignItem(_, foreign_item)) => foreign_item.span, + Some(EntryTraitItem(_, trait_method)) => trait_method.span, + Some(EntryImplItem(_, impl_item)) => impl_item.span, + Some(EntryVariant(_, variant)) => variant.span, + Some(EntryField(_, field)) => field.span, + Some(EntryExpr(_, expr)) => expr.span, + Some(EntryStmt(_, stmt)) => stmt.span, + Some(EntryTy(_, ty)) => ty.span, + Some(EntryTraitRef(_, tr)) => tr.path.span, + Some(EntryLocal(_, pat)) => pat.span, + Some(EntryPat(_, pat)) => pat.span, + Some(EntryBlock(_, block)) => block.span, + Some(EntryStructCtor(_, _)) => self.expect_item(self.get_parent(id)).span, + Some(EntryLifetime(_, lifetime)) => lifetime.span, + Some(EntryTyParam(_, ty_param)) => ty_param.span, + Some(EntryVisibility(_, &Visibility::Restricted { ref path, .. 
})) => path.span, + Some(EntryVisibility(_, v)) => bug!("unexpected Visibility {:?}", v), + + Some(RootCrate) => self.forest.krate.span, + Some(RootInlinedParent(parent)) => parent.body.span, + Some(NotPresent) | None => { + bug!("hir::map::Map::span: id not in map: {:?}", id) + } + } } pub fn span_if_local(&self, id: DefId) -> Option { self.as_local_node_id(id).map(|id| self.span(id)) } - pub fn def_id_span(&self, def_id: DefId, fallback: Span) -> Span { - if let Some(node_id) = self.as_local_node_id(def_id) { - self.opt_span(node_id).unwrap_or(fallback) - } else { - fallback - } - } - pub fn node_to_string(&self, id: NodeId) -> String { node_id_to_string(self, id, true) } @@ -823,6 +883,7 @@ impl<'a, 'ast> Iterator for NodesMatchingSuffix<'a, 'ast> { Some(EntryTraitItem(_, n)) => n.name(), Some(EntryImplItem(_, n)) => n.name(), Some(EntryVariant(_, n)) => n.name(), + Some(EntryField(_, n)) => n.name(), _ => continue, }; if self.matches_names(self.map.get_parent(idx), name) { @@ -841,6 +902,7 @@ impl Named for Spanned { fn name(&self) -> Name { self.node.name() } impl Named for Item { fn name(&self) -> Name { self.name } } impl Named for ForeignItem { fn name(&self) -> Name { self.name } } impl Named for Variant_ { fn name(&self) -> Name { self.name } } +impl Named for StructField { fn name(&self) -> Name { self.name } } impl Named for TraitItem { fn name(&self) -> Name { self.name } } impl Named for ImplItem { fn name(&self) -> Name { self.name } } @@ -926,15 +988,20 @@ impl<'a> NodePrinter for pprust::State<'a> { NodeExpr(a) => self.print_expr(&a), NodeStmt(a) => self.print_stmt(&a), NodeTy(a) => self.print_type(&a), + NodeTraitRef(a) => self.print_trait_ref(&a), NodePat(a) => self.print_pat(&a), NodeBlock(a) => self.print_block(&a), NodeLifetime(a) => self.print_lifetime(&a), + NodeVisibility(a) => self.print_visibility(&a), NodeTyParam(_) => bug!("cannot print TyParam"), + NodeField(_) => bug!("cannot print StructField"), // these cases do not carry enough information in the // ast_map to reconstruct their full structure for pretty // printing. 
NodeLocal(_) => bug!("cannot print isolated Local"), NodeStructCtor(_) => bug!("cannot print isolated StructCtor"), + + NodeInlinedItem(_) => bug!("cannot print inlined item"), } } } @@ -1009,6 +1076,11 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { variant.node.name, path_str(), id_str) } + Some(NodeField(ref field)) => { + format!("field {} in {}{}", + field.name, + path_str(), id_str) + } Some(NodeExpr(ref expr)) => { format!("expr {}{}", pprust::expr_to_string(&expr), id_str) } @@ -1018,6 +1090,9 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { Some(NodeTy(ref ty)) => { format!("type {}{}", pprust::ty_to_string(&ty), id_str) } + Some(NodeTraitRef(ref tr)) => { + format!("trait_ref {}{}", pprust::path_to_string(&tr.path), id_str) + } Some(NodeLocal(ref pat)) => { format!("local {}{}", pprust::pat_to_string(&pat), id_str) } @@ -1037,6 +1112,12 @@ fn node_id_to_string(map: &Map, id: NodeId, include_id: bool) -> String { Some(NodeTyParam(ref ty_param)) => { format!("typaram {:?}{}", ty_param, id_str) } + Some(NodeVisibility(ref vis)) => { + format!("visibility {:?}{}", vis, id_str) + } + Some(NodeInlinedItem(_)) => { + format!("inlined item {}", id_str) + } None => { format!("unknown node{}", id_str) } diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 31648765224..4fd8f96ba04 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -27,13 +27,13 @@ pub use self::Ty_::*; pub use self::TyParamBound::*; pub use self::UnOp::*; pub use self::UnsafeSource::*; -pub use self::ViewPath_::*; pub use self::Visibility::{Public, Inherited}; pub use self::PathParameters::*; use hir::def::Def; use hir::def_id::DefId; use util::nodemap::{NodeMap, FxHashSet}; +use rustc_data_structures::fnv::FnvHashMap; use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP}; use syntax::codemap::{self, respan, Spanned}; @@ -108,6 +108,8 @@ pub struct Path { /// A `::foo` path, is relative to the crate root rather than current /// module (like paths in an import). pub global: bool, + /// The definition that the path resolved to. + pub def: Def, /// The segments in the path: the things separated by `::`. pub segments: HirVec, } @@ -124,21 +126,6 @@ impl fmt::Display for Path { } } -impl Path { - /// Convert a span and an identifier to the corresponding - /// 1-segment path. - pub fn from_name(s: Span, name: Name) -> Path { - Path { - span: s, - global: false, - segments: hir_vec![PathSegment { - name: name, - parameters: PathParameters::none() - }], - } - } -} - /// A segment of a path: an identifier, an optional lifetime, and a set of /// types. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] @@ -154,6 +141,16 @@ pub struct PathSegment { pub parameters: PathParameters, } +impl PathSegment { + /// Convert an identifier to the corresponding segment. + pub fn from_name(name: Name) -> PathSegment { + PathSegment { + name: name, + parameters: PathParameters::none() + } + } +} + #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum PathParameters { /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>` @@ -167,6 +164,7 @@ impl PathParameters { AngleBracketedParameters(AngleBracketedParameterData { lifetimes: HirVec::new(), types: HirVec::new(), + infer_types: true, bindings: HirVec::new(), }) } @@ -241,6 +239,11 @@ pub struct AngleBracketedParameterData { pub lifetimes: HirVec, /// The type parameters for this path segment, if present. 
pub types: HirVec>, + /// Whether to infer remaining type parameters, if any. + /// This only applies to expression and pattern paths, and + /// out of those only the segments with no type parameters + /// to begin with, e.g. `Vec::new` is `>::new::<..>`. + pub infer_types: bool, /// Bindings (equality constraints) on associated types, if present. /// E.g., `Foo`. pub bindings: HirVec, @@ -426,6 +429,7 @@ pub struct Crate { pub items: BTreeMap, pub impl_items: BTreeMap, + pub exprs: FnvHashMap, } impl Crate { @@ -456,6 +460,10 @@ impl Crate { visitor.visit_impl_item(impl_item); } } + + pub fn expr(&self, id: ExprId) -> &Expr { + &self.exprs[&id] + } } /// A macro definition, in this crate or imported from another. @@ -527,7 +535,7 @@ impl Pat { PatKind::Lit(_) | PatKind::Range(..) | PatKind::Binding(..) | - PatKind::Path(..) => { + PatKind::Path(_) => { true } } @@ -566,20 +574,20 @@ pub enum PatKind { Wild, /// A fresh binding `ref mut binding @ OPT_SUBPATTERN`. - Binding(BindingMode, Spanned, Option>), + /// The `DefId` is for the definition of the variable being bound. + Binding(BindingMode, DefId, Spanned, Option>), /// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`. /// The `bool` is `true` in the presence of a `..`. - Struct(Path, HirVec>, bool), + Struct(QPath, HirVec>, bool), /// A tuple struct/variant pattern `Variant(x, y, .., z)`. /// If the `..` pattern fragment is present, then `Option` denotes its position. /// 0 <= position <= subpats.len() - TupleStruct(Path, HirVec>, Option), + TupleStruct(QPath, HirVec>, Option), - /// A possibly qualified path pattern. - /// Such pattern can be resolved to a unit struct/variant or a constant. - Path(Option, Path), + /// A path pattern for an unit struct/variant or a (maybe-associated) constant. + Path(QPath), /// A tuple pattern `(a, b)`. /// If the `..` pattern fragment is present, then `Option` denotes its position. @@ -836,9 +844,6 @@ pub enum BlockCheckMode { UnsafeBlock(UnsafeSource), PushUnsafeBlock(UnsafeSource), PopUnsafeBlock(UnsafeSource), - // Within this block (but outside a PopUnstableBlock), we suspend checking of stability. - PushUnstableBlock, - PopUnstableBlock, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] @@ -847,6 +852,15 @@ pub enum UnsafeSource { UserProvided, } +#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct ExprId(NodeId); + +impl ExprId { + pub fn node_id(self) -> NodeId { + self.0 + } +} + /// An expression #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub struct Expr { @@ -856,6 +870,12 @@ pub struct Expr { pub attrs: ThinVec, } +impl Expr { + pub fn expr_id(&self) -> ExprId { + ExprId(self.id) + } +} + impl fmt::Debug for Expr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "expr({}: {})", self.id, print::expr_to_string(self)) @@ -915,7 +935,7 @@ pub enum Expr_ { /// A closure (for example, `move |a, b, c| {a + b + c}`). /// /// The final span is the span of the argument block `|...|` - ExprClosure(CaptureClause, P, P, Span), + ExprClosure(CaptureClause, P, ExprId, Span), /// A block (`{ ... }`) ExprBlock(P), @@ -934,19 +954,15 @@ pub enum Expr_ { /// An indexing operation (`foo[2]`) ExprIndex(P, P), - /// Variable reference, possibly containing `::` and/or type - /// parameters, e.g. foo::bar::. - /// - /// Optionally "qualified", - /// e.g. ` as SomeTrait>::SomeType`. 
-    ExprPath(Option<QSelf>, Path),
+    /// Path to a definition, possibly containing lifetime or type parameters.
+    ExprPath(QPath),
     /// A referencing operation (`&a` or `&mut a`)
     ExprAddrOf(Mutability, P<Expr>),
     /// A `break`, with an optional label to break
-    ExprBreak(Option<Spanned<Name>>, Option<P<Expr>>),
+    ExprBreak(Option