Auto merge of #81625 - jonas-schievink:rollup-mshpp2n, r=jonas-schievink

Rollup of 12 pull requests

Successful merges:

 - #78641 (Let io::copy reuse BufWriter buffers)
 - #79291 (Add error message for private fn)
 - #81364 (Improve `rustc_mir_build::matches` docs)
 - #81387 (Move some tests to more reasonable directories - 3)
 - #81463 (Rename NLL* to Nll* accordingly to C-CASE)
 - #81504 (Suggest accessing field when appropriate)
 - #81529 (Fix invalid camel case suggestion involving unicode idents)
 - #81536 (Indicate both start and end of pass RSS in time-passes output)
 - #81592 (Rustdoc UI fixes)
 - #81594 (Avoid building LLVM just for llvm-dwp)
 - #81598 (Fix calling convention for CRT startup)
 - #81618 (Sync rustc_codegen_cranelift)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2021-02-01 16:30:22 +00:00
commit 02b85d7220
137 changed files with 1691 additions and 931 deletions


@ -4133,6 +4133,7 @@ dependencies = [
  "rustc_middle",
  "rustc_session",
  "rustc_span",
+ "rustc_trait_selection",
  "rustc_typeck",
  "tracing",
 ]


@ -12,6 +12,9 @@ jobs:
     fail-fast: false
     matrix:
       os: [ubuntu-latest, macos-latest]
+      env:
+        - BACKEND: ""
+        - BACKEND: --oldbe

     steps:
     - uses: actions/checkout@v2
@ -51,7 +54,7 @@ jobs:
         export COMPILE_RUNS=2
         export RUN_RUNS=2

-        ./test.sh
+        ./test.sh $BACKEND

     - name: Package prebuilt cg_clif
       run: tar cvfJ cg_clif.tar.xz build
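The new matrix dimension runs each OS job twice; for reference, the two BACKEND entries expand to the following test invocations:

    ./test.sh            # BACKEND: ""     - default backend
    ./test.sh --oldbe    # BACKEND: --oldbe - build with the oldbe feature enabled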


@ -8,6 +8,7 @@ perf.data.old
 *.string*
 /build
 /build_sysroot/sysroot_src
+/build_sysroot/compiler-builtins
 /rust
 /rand
 /regex


@ -1,7 +1,7 @@
 {
     // source for rustc_* is not included in the rust-src component; disable the errors about this
     "rust-analyzer.diagnostics.disabled": ["unresolved-extern-crate"],
-    "rust-analyzer.assist.importMergeBehaviour": "last",
+    "rust-analyzer.assist.importMergeBehavior": "last",
     "rust-analyzer.cargo.loadOutDirsFromCheck": true,
     "rust-analyzer.linkedProjects": [
         "./Cargo.toml",


@ -2,9 +2,9 @@
 # It is not intended for manual editing.
 [[package]]
 name = "anyhow"
-version = "1.0.34"
+version = "1.0.38"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf8dcb5b4bbaa28653b647d8c77bd4ed40183b48882e130c1f1ffb73de069fd7"
+checksum = "afddf7f520a80dbf76e6f50a35bca42a2331ef227a28b3b6dc5c2e2338d114b1"

 [[package]]
 name = "ar"
@ -25,15 +25,15 @@ checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
 [[package]]
 name = "byteorder"
-version = "1.3.4"
+version = "1.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
+checksum = "ae44d1a3d5a19df61dd0c8beb138458ac2a53a7ac09eba97d55592540004306b"

 [[package]]
 name = "cc"
-version = "1.0.62"
+version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f1770ced377336a88a67c473594ccc14eca6f4559217c34f64aac8f83d641b40"
+checksum = "4c0496836a84f8d0495758516b8621a622beb77c0fed418570e50764093ced48"

 [[package]]
 name = "cfg-if"
@ -49,16 +49,16 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 [[package]]
 name = "cranelift-bforest"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "cranelift-entity",
 ]

 [[package]]
 name = "cranelift-codegen"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "byteorder",
  "cranelift-bforest",
@ -75,8 +75,8 @@ dependencies = [
 [[package]]
 name = "cranelift-codegen-meta"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "cranelift-codegen-shared",
  "cranelift-entity",
@ -84,18 +84,18 @@ dependencies = [
 [[package]]
 name = "cranelift-codegen-shared"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"

 [[package]]
 name = "cranelift-entity"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"

 [[package]]
 name = "cranelift-frontend"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "cranelift-codegen",
  "log",
@ -105,8 +105,8 @@ dependencies = [
 [[package]]
 name = "cranelift-jit"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "anyhow",
  "cranelift-codegen",
@ -123,8 +123,8 @@ dependencies = [
 [[package]]
 name = "cranelift-module"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "anyhow",
  "cranelift-codegen",
@ -135,8 +135,8 @@ dependencies = [
 [[package]]
 name = "cranelift-native"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "cranelift-codegen",
  "raw-cpuid",
@ -145,8 +145,8 @@ dependencies = [
 [[package]]
 name = "cranelift-object"
-version = "0.68.0"
-source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#8f7f8ee0b4c5007ace6de29b45505c360450b1bb"
+version = "0.69.0"
+source = "git+https://github.com/bytecodealliance/wasmtime/?branch=main#986b5768f9e68f1564b43f32b8a4080a6582c8ca"
 dependencies = [
  "anyhow",
  "cranelift-codegen",
@ -209,9 +209,9 @@ checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
 [[package]]
 name = "indexmap"
-version = "1.6.0"
+version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "55e2e4c765aa53a0424761bf9f41aa7a6ac1efa87238f59560640e27fca028f2"
+checksum = "4fb1fa934250de4de8aef298d81c729a7d33d8c239daa3a7575e6b92bfc7313b"
 dependencies = [
  "autocfg",
  "hashbrown",
@ -219,15 +219,15 @@ dependencies = [
 [[package]]
 name = "libc"
-version = "0.2.80"
+version = "0.2.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d58d1b70b004888f764dfbf6a26a3b0342a1632d33968e4a179d8011c760614"
+checksum = "89203f3fba0a3795506acaad8ebce3c80c0af93f994d5a1d7a0b1eeb23271929"

 [[package]]
 name = "libloading"
-version = "0.6.5"
+version = "0.6.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1090080fe06ec2648d0da3881d9453d97e71a45f00eb179af7fdd7e3f686fdb0"
+checksum = "351a32417a12d5f7e82c368a66781e307834dae04c6ce0cd4456d52989229883"
 dependencies = [
  "cfg-if 1.0.0",
  "winapi",
@ -235,9 +235,9 @@ dependencies = [
 [[package]]
 name = "log"
-version = "0.4.11"
+version = "0.4.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4fabed175da42fed1fa0746b0ea71f412aa9d35e76e95e59b192c64b9dc2bf8b"
+checksum = "fcf3805d4480bb5b86070dcfeb9e2cb2ebc148adb753c5cca5f884d1d65a42b2"
 dependencies = [
  "cfg-if 0.1.10",
 ]
@ -272,9 +272,9 @@ dependencies = [
 [[package]]
 name = "quote"
-version = "1.0.7"
+version = "1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa563d17ecb180e500da1cfd2b028310ac758de548efdd203e18f283af693f37"
+checksum = "991431c3519a3f36861882da93630ce66b52918dcf1b8e2fd66b397fc96f28df"
 dependencies = [
  "proc-macro2",
 ]
@ -333,6 +333,7 @@ dependencies = [
  "indexmap",
  "libloading",
  "object",
+ "smallvec",
  "target-lexicon",
 ]
@ -362,15 +363,15 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
 [[package]]
 name = "smallvec"
-version = "1.4.2"
+version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fbee7696b84bbf3d89a1c2eccff0850e3047ed46bfcd2e92c29a2d074d57e252"
+checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"

 [[package]]
 name = "syn"
-version = "1.0.48"
+version = "1.0.58"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc371affeffc477f42a221a1e4297aedcea33d47d19b61455588bd9d8f6b19ac"
+checksum = "cc60a3d73ea6594cd712d830cc1f0390fd71542d8c8cd24e70cc54cdfd5e05d5"
 dependencies = [
  "proc-macro2",
  "quote",
@ -385,18 +386,18 @@ checksum = "4ee5a98e506fb7231a304c3a1bd7c132a55016cf65001e0282480665870dfcb9"
 [[package]]
 name = "thiserror"
-version = "1.0.22"
+version = "1.0.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0e9ae34b84616eedaaf1e9dd6026dbe00dcafa92aa0c8077cb69df1fcfe5e53e"
+checksum = "76cc616c6abf8c8928e2fdcc0dbfab37175edd8fb49a4641066ad1364fdab146"
 dependencies = [
  "thiserror-impl",
 ]

 [[package]]
 name = "thiserror-impl"
-version = "1.0.22"
+version = "1.0.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ba20f23e85b10754cd195504aebf6a27e2e6cbe28c17778a0c930724628dd56"
+checksum = "9be73a2caec27583d0046ef3796c3794f868a5bc813db689eed00c7631275cd1"
 dependencies = [
  "proc-macro2",
  "quote",


@ -9,7 +9,7 @@ crate-type = ["dylib"]
 [dependencies]
 # These have to be in sync with each other
-cranelift-codegen = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", features = ["unwind"] }
+cranelift-codegen = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", features = ["unwind", "x86", "x64"] }
 cranelift-frontend = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
 cranelift-module = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main" }
 cranelift-jit = { git = "https://github.com/bytecodealliance/wasmtime/", branch = "main", optional = true }
@ -21,6 +21,7 @@ object = { version = "0.22.0", default-features = false, features = ["std", "rea
 ar = { git = "https://github.com/bjorn3/rust-ar.git", branch = "do_not_remove_cg_clif_ranlib" }
 indexmap = "1.0.2"
 libloading = { version = "0.6.0", optional = true }
+smallvec = "1.6.1"

 # Uncomment to use local checkout of cranelift
 #[patch."https://github.com/bytecodealliance/wasmtime/"]
@ -37,6 +38,7 @@ libloading = { version = "0.6.0", optional = true }
 default = ["jit", "inline_asm"]
 jit = ["cranelift-jit", "libloading"]
 inline_asm = []
+oldbe = []

 [profile.dev]
 # By compiling dependencies with optimizations, performing tests gets much faster.


@ -1,6 +1,4 @@
-# WIP Cranelift codegen backend for rust
-
-> ⚠⚠⚠ Certain kinds of FFI don't work yet. ⚠⚠⚠
+# Cranelift codegen backend for rust

 The goal of this project is to create an alternative codegen backend for the rust compiler based on [Cranelift](https://github.com/bytecodealliance/wasmtime/blob/main/cranelift).
 This has the potential to improve compilation times in debug mode.
@ -103,8 +101,7 @@ function jit_calc() {
 ## Not yet supported

-* Good non-rust abi support ([several problems](https://github.com/bjorn3/rustc_codegen_cranelift/issues/10))
-* Inline assembly ([no cranelift support](https://github.com/bytecodealliance/wasmtime/issues/1041)
+* Inline assembly ([no cranelift support](https://github.com/bytecodealliance/wasmtime/issues/1041))
   * On Linux there is support for invoking an external assembler for `global_asm!` and `asm!`.
     `llvm_asm!` will remain unimplemented forever. `asm!` doesn't yet support reg classes. You
     have to specify specific registers instead.


@ -3,23 +3,29 @@ set -e

 # Settings
 export CHANNEL="release"
-build_sysroot=1
+build_sysroot="clif"
 target_dir='build'
+oldbe=''
 while [[ $# != 0 ]]; do
     case $1 in
         "--debug")
            export CHANNEL="debug"
            ;;
-        "--without-sysroot")
-            build_sysroot=0
+        "--sysroot")
+            build_sysroot=$2
+            shift
            ;;
         "--target-dir")
            target_dir=$2
            shift
            ;;
+        "--oldbe")
+            oldbe='--features oldbe'
+            ;;
         *)
            echo "Unknown flag '$1'"
-           echo "Usage: ./build.sh [--debug] [--without-sysroot] [--target-dir DIR]"
+           echo "Usage: ./build.sh [--debug] [--sysroot none|clif|llvm] [--target-dir DIR] [--oldbe]"
+           exit 1
            ;;
     esac
     shift
@ -27,23 +33,24 @@ done

 # Build cg_clif
 unset CARGO_TARGET_DIR
-export RUSTFLAGS="-Zrun_dsymutil=no"
 unamestr=$(uname)
 if [[ "$unamestr" == 'Linux' ]]; then
    export RUSTFLAGS='-Clink-arg=-Wl,-rpath=$ORIGIN/../lib '$RUSTFLAGS
 elif [[ "$unamestr" == 'Darwin' ]]; then
-   export RUSTFLAGS='-Clink-arg=-Wl,-rpath,@loader_path/../lib -Zosx-rpath-install-name '$RUSTFLAGS
+   export RUSTFLAGS='-Csplit-debuginfo=unpacked -Clink-arg=-Wl,-rpath,@loader_path/../lib -Zosx-rpath-install-name '$RUSTFLAGS
    dylib_ext='dylib'
 else
    echo "Unsupported os"
    exit 1
 fi

 if [[ "$CHANNEL" == "release" ]]; then
-   cargo build --release
+   cargo build $oldbe --release
 else
-   cargo build
+   cargo build $oldbe
 fi

+source scripts/ext_config.sh
+
 rm -rf "$target_dir"
 mkdir "$target_dir"
 mkdir "$target_dir"/bin "$target_dir"/lib
@ -51,10 +58,29 @@ ln target/$CHANNEL/cg_clif{,_build_sysroot} "$target_dir"/bin
 ln target/$CHANNEL/*rustc_codegen_cranelift* "$target_dir"/lib
 ln rust-toolchain scripts/config.sh scripts/cargo.sh "$target_dir"

-if [[ "$build_sysroot" == "1" ]]; then
-    echo "[BUILD] sysroot"
-    export CG_CLIF_INCR_CACHE_DISABLED=1
-    dir=$(pwd)
-    cd "$target_dir"
-    time "$dir/build_sysroot/build_sysroot.sh"
+mkdir -p "$target_dir/lib/rustlib/$TARGET_TRIPLE/lib/"
+if [[ "$TARGET_TRIPLE" == "x86_64-pc-windows-gnu" ]]; then
+    cp $(rustc --print sysroot)/lib/rustlib/$TARGET_TRIPLE/lib/*.o "$target_dir/lib/rustlib/$TARGET_TRIPLE/lib/"
 fi
+
+case "$build_sysroot" in
+    "none")
+        ;;
+    "llvm")
+        cp -r $(rustc --print sysroot)/lib/rustlib/$TARGET_TRIPLE/lib "$target_dir/lib/rustlib/$TARGET_TRIPLE/"
+        ;;
+    "clif")
+        echo "[BUILD] sysroot"
+        dir=$(pwd)
+        cd "$target_dir"
+        time "$dir/build_sysroot/build_sysroot.sh"
+        cp lib/rustlib/*/lib/libstd-* lib/
+        ;;
+    *)
+        echo "Unknown sysroot kind \`$build_sysroot\`."
+        echo "The allowed values are:"
+        echo "    none A sysroot that doesn't contain the standard library"
+        echo "    llvm Copy the sysroot from rustc compiled by cg_llvm"
+        echo "    clif Build a new sysroot using cg_clif"
+        exit 1
+esac
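For reference, the reworked option parsing above accepts invocations like the following (a usage sketch based only on the flags handled in this script; paths follow the repository defaults):

    ./build.sh                          # release build; defaults to --sysroot clif
    ./build.sh --debug --sysroot none   # debug build, no standard library in the produced sysroot
    ./build.sh --sysroot llvm --oldbe   # reuse rustc's own sysroot, enable the oldbe feature
    ./build.sh --target-dir dist        # place the result in ./dist instead of ./build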


@ -2,9 +2,9 @@
 # It is not intended for manual editing.
 [[package]]
 name = "addr2line"
-version = "0.14.0"
+version = "0.14.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c0929d69e78dd9bf5408269919fcbcaeb2e35e5d43e5815517cdc6a8e11a423"
+checksum = "a55f82cfe485775d02112886f4169bde0c5894d75e79ead7eafe7e40a25e45f7"
 dependencies = [
  "compiler_builtins",
  "gimli",
@ -63,9 +63,7 @@ dependencies = [
 [[package]]
 name = "compiler_builtins"
-version = "0.1.36"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7cd0782e0a7da7598164153173e5a5d4d9b1da094473c98dce0ff91406112369"
+version = "0.1.39"
 dependencies = [
  "rustc-std-workspace-core",
 ]
@ -130,9 +128,9 @@ dependencies = [
 [[package]]
 name = "hermit-abi"
-version = "0.1.17"
+version = "0.1.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aca5565f760fb5b220e499d72710ed156fdb74e631659e99377d9ebfbd13ae8"
+checksum = "322f4de77956e22ed0e5032c359a0f1273f1f7f0d79bfa3b8ffbc730d7fbcc5c"
 dependencies = [
  "compiler_builtins",
  "libc",
@ -141,9 +139,9 @@ dependencies = [
 [[package]]
 name = "libc"
-version = "0.2.81"
+version = "0.2.84"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1482821306169ec4d07f6aca392a4681f66c75c9918aa49641a2595db64053cb"
+checksum = "1cca32fa0182e8c0989459524dc356b8f2b5c10f1b9eb521b7d182c03cf8c5ff"
 dependencies = [
  "rustc-std-workspace-core",
 ]


@ -11,12 +11,13 @@ test = { path = "./sysroot_src/library/test" }
 alloc_system = { path = "./alloc_system" }

-compiler_builtins = { version = "=0.1.36", default-features = false }
+compiler_builtins = { version = "0.1.39", default-features = false, features = ["no-asm"] }

 [patch.crates-io]
 rustc-std-workspace-core = { path = "./sysroot_src/library/rustc-std-workspace-core" }
 rustc-std-workspace-alloc = { path = "./sysroot_src/library/rustc-std-workspace-alloc" }
 rustc-std-workspace-std = { path = "./sysroot_src/library/rustc-std-workspace-std" }
+compiler_builtins = { path = "./compiler-builtins" }

 [profile.dev]
 lto = "off"


@ -24,17 +24,16 @@ export CARGO_TARGET_DIR=target

 # Build libs
 export RUSTFLAGS="$RUSTFLAGS -Zforce-unstable-if-unmarked -Cpanic=abort"
+export __CARGO_DEFAULT_LIB_METADATA="cg_clif"
 if [[ "$1" != "--debug" ]]; then
    sysroot_channel='release'
    # FIXME Enable incremental again once rust-lang/rust#74946 is fixed
-   # FIXME Enable -Zmir-opt-level=2 again once it doesn't ice anymore
-   CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS" cargo build --target "$TARGET_TRIPLE" --release
+   CARGO_INCREMENTAL=0 RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=2" cargo build --target "$TARGET_TRIPLE" --release
 else
    sysroot_channel='debug'
    cargo build --target "$TARGET_TRIPLE"
 fi

 # Copy files to sysroot
-mkdir -p "$dir/lib/rustlib/$TARGET_TRIPLE/lib/"
 ln "target/$TARGET_TRIPLE/$sysroot_channel/deps/"* "$dir/lib/rustlib/$TARGET_TRIPLE/lib/"
 rm "$dir/lib/rustlib/$TARGET_TRIPLE/lib/"*.{rmeta,d}


@ -29,4 +29,11 @@ git commit --no-gpg-sign -m "Patch $file"
 done
 popd

-echo "Successfully prepared libcore for building"
+git clone https://github.com/rust-lang/compiler-builtins.git || echo "rust-lang/compiler-builtins has already been cloned"
+pushd compiler-builtins
+git checkout -- .
+git checkout 0.1.39
+git apply ../../crate_patches/0001-compiler-builtins-Remove-rotate_left-from-Int.patch
+popd
+
+echo "Successfully prepared sysroot source for building"


@ -1,5 +1,5 @@
 #!/bin/bash --verbose
 set -e

-rm -rf target/ build/ build_sysroot/{sysroot_src/,target/} perf.data{,.old}
+rm -rf target/ build/ build_sysroot/{sysroot_src/,target/,compiler-builtins/} perf.data{,.old}
 rm -rf rand/ regex/ simple-raytracer/


@ -0,0 +1,35 @@
From 7078cca3cb614e1e82da428380b4e16fc3afef46 Mon Sep 17 00:00:00 2001
From: bjorn3 <bjorn3@users.noreply.github.com>
Date: Thu, 21 Jan 2021 14:46:36 +0100
Subject: [PATCH] Remove rotate_left from Int
---
src/int/mod.rs | 5 -----
1 file changed, 5 deletions(-)
diff --git a/src/int/mod.rs b/src/int/mod.rs
index 06054c8..3bea17b 100644
--- a/src/int/mod.rs
+++ b/src/int/mod.rs
@@ -85,7 +85,6 @@ pub trait Int:
fn wrapping_sub(self, other: Self) -> Self;
fn wrapping_shl(self, other: u32) -> Self;
fn wrapping_shr(self, other: u32) -> Self;
- fn rotate_left(self, other: u32) -> Self;
fn overflowing_add(self, other: Self) -> (Self, bool);
fn aborting_div(self, other: Self) -> Self;
fn aborting_rem(self, other: Self) -> Self;
@@ -209,10 +208,6 @@ macro_rules! int_impl_common {
<Self>::wrapping_shr(self, other)
}
- fn rotate_left(self, other: u32) -> Self {
- <Self>::rotate_left(self, other)
- }
-
fn overflowing_add(self, other: Self) -> (Self, bool) {
<Self>::overflowing_add(self, other)
}
--
2.26.2.7.g19db9cfb68


@ -11,7 +11,8 @@ use alloc_system::System;
 #[global_allocator]
 static ALLOC: System = System;

-#[link(name = "c")]
+#[cfg_attr(unix, link(name = "c"))]
+#[cfg_attr(target_env = "msvc", link(name = "msvcrt"))]
 extern "C" {
     fn puts(s: *const u8) -> i32;
 }


@ -532,8 +532,8 @@ pub mod intrinsics {
 }

 pub mod libc {
-    #[cfg_attr(not(windows), link(name = "c"))]
-    #[cfg_attr(windows, link(name = "msvcrt"))]
+    #[cfg_attr(unix, link(name = "c"))]
+    #[cfg_attr(target_env = "msvc", link(name = "msvcrt"))]
     extern "C" {
         pub fn puts(s: *const i8) -> i32;
         pub fn printf(format: *const i8, ...) -> i32;


@ -1,7 +1,8 @@
 #![feature(start, box_syntax, core_intrinsics, lang_items)]
 #![no_std]

-#[link(name = "c")]
+#[cfg_attr(unix, link(name = "c"))]
+#[cfg_attr(target_env = "msvc", link(name = "msvcrt"))]
 extern {}

 #[panic_handler]


@ -119,5 +119,21 @@ index 6609bc3..241b497 100644
  #[test]
  #[should_panic(expected = "index 0 greater than length of slice")]
+diff --git a/library/core/tests/num/ops.rs b/library/core/tests/num/ops.rs
+index 9979cc8..d5d1d83 100644
+--- a/library/core/tests/num/ops.rs
++++ b/library/core/tests/num/ops.rs
+@@ -238,7 +238,7 @@ macro_rules! test_shift_assign {
+         }
+     };
+ }
+-test_shift!(test_shl_defined, Shl::shl);
+-test_shift_assign!(test_shl_assign_defined, ShlAssign::shl_assign);
+-test_shift!(test_shr_defined, Shr::shr);
+-test_shift_assign!(test_shr_assign_defined, ShrAssign::shr_assign);
++//test_shift!(test_shl_defined, Shl::shl);
++//test_shift_assign!(test_shl_assign_defined, ShlAssign::shl_assign);
++//test_shift!(test_shr_defined, Shr::shr);
++//test_shift_assign!(test_shr_assign_defined, ShrAssign::shr_assign);
 --
 2.21.0 (Apple Git-122)


@ -1 +1 @@
-nightly-2020-12-23
+nightly-2021-01-30
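Since rustup reads the rust-toolchain file automatically, bumping the pin is normally all that is needed; to fetch the new nightly ahead of time (sketch):

    rustup toolchain install nightly-2021-01-30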


@ -12,28 +12,6 @@ else
    exit 1
 fi

-HOST_TRIPLE=$(rustc -vV | grep host | cut -d: -f2 | tr -d " ")
-TARGET_TRIPLE=$HOST_TRIPLE
-#TARGET_TRIPLE="x86_64-pc-windows-gnu"
-#TARGET_TRIPLE="aarch64-unknown-linux-gnu"
-
-linker=''
-RUN_WRAPPER=''
-export JIT_SUPPORTED=1
-if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
-   export JIT_SUPPORTED=0
-   if [[ "$TARGET_TRIPLE" == "aarch64-unknown-linux-gnu" ]]; then
-      # We are cross-compiling for aarch64. Use the correct linker and run tests in qemu.
-      linker='-Clinker=aarch64-linux-gnu-gcc'
-      RUN_WRAPPER='qemu-aarch64 -L /usr/aarch64-linux-gnu'
-   elif [[ "$TARGET_TRIPLE" == "x86_64-pc-windows-gnu" ]]; then
-      # We are cross-compiling for Windows. Run tests in wine.
-      RUN_WRAPPER='wine'
-   else
-      echo "Unknown non-native platform"
-   fi
-fi
-
 if echo "$RUSTC_WRAPPER" | grep sccache; then
    echo
    echo -e "\x1b[1;93m=== Warning: Unset RUSTC_WRAPPER to prevent interference with sccache ===\x1b[0m"
@ -44,16 +22,14 @@ fi
 dir=$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)

 export RUSTC=$dir"/bin/cg_clif"
-export RUSTFLAGS=$linker" "$RUSTFLAGS
 export RUSTDOCFLAGS=$linker' -Cpanic=abort -Zpanic-abort-tests '\
 '-Zcodegen-backend='$dir'/lib/librustc_codegen_cranelift.'$dylib_ext' --sysroot '$dir

 # FIXME remove once the atomic shim is gone
-if [[ $(uname) == 'Darwin' ]]; then
+if [[ "$unamestr" == 'Darwin' ]]; then
    export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup"
 fi

-export LD_LIBRARY_PATH="$(rustc --print sysroot)/lib"
+export LD_LIBRARY_PATH="$(rustc --print sysroot)/lib:"$dir"/lib"
 export DYLD_LIBRARY_PATH=$LD_LIBRARY_PATH
-
-export CG_CLIF_DISPLAY_CG_TIME=1


@ -0,0 +1,27 @@
# Note to people running shellcheck: this file should only be sourced, not executed directly.
# Various env vars that should only be set for the build system but not for cargo.sh
set -e
export CG_CLIF_DISPLAY_CG_TIME=1
export CG_CLIF_INCR_CACHE_DISABLED=1
export HOST_TRIPLE=$(rustc -vV | grep host | cut -d: -f2 | tr -d " ")
export TARGET_TRIPLE=${TARGET_TRIPLE:-$HOST_TRIPLE}
export RUN_WRAPPER=''
export JIT_SUPPORTED=1
if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
export JIT_SUPPORTED=0
if [[ "$TARGET_TRIPLE" == "aarch64-unknown-linux-gnu" ]]; then
# We are cross-compiling for aarch64. Use the correct linker and run tests in qemu.
export RUSTFLAGS='-Clinker=aarch64-linux-gnu-gcc '$RUSTFLAGS
export RUN_WRAPPER='qemu-aarch64 -L /usr/aarch64-linux-gnu'
elif [[ "$TARGET_TRIPLE" == "x86_64-pc-windows-gnu" ]]; then
# We are cross-compiling for Windows. Run tests in wine.
export RUN_WRAPPER='wine'
else
echo "Unknown non-native platform"
fi
fi
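These variables take effect when the file is sourced by the build and test scripts; because TARGET_TRIPLE falls back to the host triple, a cross build can be driven from the environment (a sketch, assuming qemu-aarch64 and an aarch64 cross gcc are installed as the comments suggest):

    TARGET_TRIPLE=aarch64-unknown-linux-gnu ./build.sh
    TARGET_TRIPLE=aarch64-unknown-linux-gnu ./test.sh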


@ -3,7 +3,7 @@
 set -e

 source build/config.sh
-export CG_CLIF_INCR_CACHE_DISABLED=1
+source scripts/ext_config.sh
 MY_RUSTC="$RUSTC $RUSTFLAGS -L crate=target/out --out-dir target/out -Cdebuginfo=2"

 function no_sysroot_tests() {


@ -4,10 +4,10 @@
 use std::borrow::Cow;

 use rustc_middle::mir;
+use rustc_target::abi::call::PassMode;

 use cranelift_codegen::entity::EntityRef;

-use crate::abi::pass_mode::*;
 use crate::prelude::*;

 pub(super) fn add_args_header_comment(fx: &mut FunctionCx<'_, '_, impl Module>) {
@ -21,9 +21,9 @@ pub(super) fn add_arg_comment<'tcx>(
     kind: &str,
     local: Option<mir::Local>,
     local_field: Option<usize>,
-    params: EmptySinglePair<Value>,
-    pass_mode: PassMode,
-    ty: Ty<'tcx>,
+    params: &[Value],
+    arg_abi_mode: PassMode,
+    arg_layout: TyAndLayout<'tcx>,
 ) {
     let local = if let Some(local) = local {
         Cow::Owned(format!("{:?}", local))
@ -37,12 +37,20 @@ pub(super) fn add_arg_comment<'tcx>(
     };

     let params = match params {
-        Empty => Cow::Borrowed("-"),
-        Single(param) => Cow::Owned(format!("= {:?}", param)),
-        Pair(param_a, param_b) => Cow::Owned(format!("= {:?}, {:?}", param_a, param_b)),
+        [] => Cow::Borrowed("-"),
+        [param] => Cow::Owned(format!("= {:?}", param)),
+        [param_a, param_b] => Cow::Owned(format!("= {:?},{:?}", param_a, param_b)),
+        params => Cow::Owned(format!(
+            "= {}",
+            params
+                .iter()
+                .map(ToString::to_string)
+                .collect::<Vec<_>>()
+                .join(",")
+        )),
     };

-    let pass_mode = format!("{:?}", pass_mode);
+    let pass_mode = format!("{:?}", arg_abi_mode);
     fx.add_global_comment(format!(
         "{kind:5}{local:>3}{local_field:<5} {params:10} {pass_mode:36} {ty:?}",
         kind = kind,
@ -50,7 +58,7 @@ pub(super) fn add_arg_comment<'tcx>(
         local_field = local_field,
         params = params,
         pass_mode = pass_mode,
-        ty = ty,
+        ty = arg_layout.ty,
     ));
 }


@ -6,199 +6,50 @@ mod pass_mode;
 mod returning;

 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
+use rustc_middle::ty::layout::FnAbiExt;
+use rustc_target::abi::call::{Conv, FnAbi};
 use rustc_target::spec::abi::Abi;

-use cranelift_codegen::ir::{AbiParam, ArgumentPurpose};
+use cranelift_codegen::ir::AbiParam;
+use smallvec::smallvec;

 use self::pass_mode::*;
 use crate::prelude::*;

 pub(crate) use self::returning::{can_return_to_ssa_var, codegen_return};
// Copied from https://github.com/rust-lang/rust/blob/f52c72948aa1dd718cc1f168d21c91c584c0a662/src/librustc_middle/ty/layout.rs#L2301 fn clif_sig_from_fn_abi<'tcx>(
#[rustfmt::skip]
pub(crate) fn fn_sig_for_fn_abi<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> ty::PolyFnSig<'tcx> {
use rustc_middle::ty::subst::Subst;
// FIXME(davidtwco,eddyb): A `ParamEnv` should be passed through to this function.
let ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
match *ty.kind() {
ty::FnDef(..) => {
// HACK(davidtwco,eddyb): This is a workaround for polymorphization considering
// parameters unused if they show up in the signature, but not in the `mir::Body`
// (i.e. due to being inside a projection that got normalized, see
// `src/test/ui/polymorphization/normalized_sig_types.rs`), and codegen not keeping
// track of a polymorphization `ParamEnv` to allow normalizing later.
let mut sig = match *ty.kind() {
ty::FnDef(def_id, substs) => tcx
.normalize_erasing_regions(tcx.param_env(def_id), tcx.fn_sig(def_id))
.subst(tcx, substs),
_ => unreachable!(),
};
if let ty::InstanceDef::VtableShim(..) = instance.def {
// Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.
sig = sig.map_bound(|mut sig| {
let mut inputs_and_output = sig.inputs_and_output.to_vec();
inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]);
sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output);
sig
});
}
sig
}
ty::Closure(def_id, substs) => {
let sig = substs.as_closure().sig();
let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
sig.map_bound(|sig| {
tcx.mk_fn_sig(
std::iter::once(env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
sig.output(),
sig.c_variadic,
sig.unsafety,
sig.abi,
)
})
}
ty::Generator(_, substs, _) => {
let sig = substs.as_generator().poly_sig();
let env_region = ty::ReLateBound(ty::INNERMOST, ty::BoundRegion { kind: ty::BrEnv });
let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);
let pin_did = tcx.require_lang_item(rustc_hir::LangItem::Pin, None);
let pin_adt_ref = tcx.adt_def(pin_did);
let pin_substs = tcx.intern_substs(&[env_ty.into()]);
let env_ty = tcx.mk_adt(pin_adt_ref, pin_substs);
sig.map_bound(|sig| {
let state_did = tcx.require_lang_item(rustc_hir::LangItem::GeneratorState, None);
let state_adt_ref = tcx.adt_def(state_did);
let state_substs =
tcx.intern_substs(&[sig.yield_ty.into(), sig.return_ty.into()]);
let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
tcx.mk_fn_sig(
[env_ty, sig.resume_ty].iter(),
&ret_ty,
false,
rustc_hir::Unsafety::Normal,
rustc_target::spec::abi::Abi::Rust,
)
})
}
_ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
}
}
fn clif_sig_from_fn_sig<'tcx>(
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
triple: &target_lexicon::Triple, triple: &target_lexicon::Triple,
sig: FnSig<'tcx>, fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
span: Span,
is_vtable_fn: bool,
requires_caller_location: bool,
) -> Signature { ) -> Signature {
let abi = match sig.abi { let call_conv = match fn_abi.conv {
Abi::System => Abi::C, Conv::Rust | Conv::C => CallConv::triple_default(triple),
abi => abi, Conv::X86_64SysV => CallConv::SystemV,
}; Conv::X86_64Win64 => CallConv::WindowsFastcall,
let (call_conv, inputs, output): (CallConv, Vec<Ty<'tcx>>, Ty<'tcx>) = match abi { Conv::ArmAapcs
Abi::Rust => ( | Conv::Msp430Intr
CallConv::triple_default(triple), | Conv::PtxKernel
sig.inputs().to_vec(), | Conv::X86Fastcall
sig.output(), | Conv::X86Intr
), | Conv::X86Stdcall
Abi::C | Abi::Unadjusted => ( | Conv::X86ThisCall
CallConv::triple_default(triple), | Conv::X86VectorCall
sig.inputs().to_vec(), | Conv::AmdGpuKernel
sig.output(), | Conv::AvrInterrupt
), | Conv::AvrNonBlockingInterrupt => {
Abi::SysV64 => (CallConv::SystemV, sig.inputs().to_vec(), sig.output()), todo!("{:?}", fn_abi.conv)
Abi::RustCall => {
assert_eq!(sig.inputs().len(), 2);
let extra_args = match sig.inputs().last().unwrap().kind() {
ty::Tuple(ref tupled_arguments) => tupled_arguments,
_ => bug!("argument to function with \"rust-call\" ABI is not a tuple"),
};
let mut inputs: Vec<Ty<'tcx>> = vec![sig.inputs()[0]];
inputs.extend(extra_args.types());
(CallConv::triple_default(triple), inputs, sig.output())
} }
Abi::System => unreachable!(),
Abi::RustIntrinsic => (
CallConv::triple_default(triple),
sig.inputs().to_vec(),
sig.output(),
),
_ => unimplemented!("unsupported abi {:?}", sig.abi),
}; };
let inputs = fn_abi
let inputs = inputs .args
.into_iter() .iter()
.enumerate() .map(|arg_abi| arg_abi.get_abi_param(tcx).into_iter())
.map(|(i, ty)| {
let mut layout = tcx.layout_of(ParamEnv::reveal_all().and(ty)).unwrap();
if i == 0 && is_vtable_fn {
// Virtual calls turn their self param into a thin pointer.
// See https://github.com/rust-lang/rust/blob/37b6a5e5e82497caf5353d9d856e4eb5d14cbe06/src/librustc/ty/layout.rs#L2519-L2572 for more info
layout = tcx
.layout_of(ParamEnv::reveal_all().and(tcx.mk_mut_ptr(tcx.mk_unit())))
.unwrap();
}
let pass_mode = get_pass_mode(tcx, layout);
if abi != Abi::Rust && abi != Abi::RustCall && abi != Abi::RustIntrinsic {
match pass_mode {
PassMode::NoPass | PassMode::ByVal(_) => {}
PassMode::ByRef { size: Some(size) } => {
let purpose = ArgumentPurpose::StructArgument(u32::try_from(size.bytes()).expect("struct too big to pass on stack"));
return EmptySinglePair::Single(AbiParam::special(pointer_ty(tcx), purpose)).into_iter();
}
PassMode::ByValPair(_, _) | PassMode::ByRef { size: None } => {
tcx.sess.span_warn(
span,
&format!(
"Argument of type `{:?}` with pass mode `{:?}` is not yet supported \
for non-rust abi `{}`. Calling this function may result in a crash.",
layout.ty,
pass_mode,
abi,
),
);
}
}
}
pass_mode.get_param_ty(tcx).map(AbiParam::new).into_iter()
})
.flatten(); .flatten();
let (mut params, returns): (Vec<_>, Vec<_>) = match get_pass_mode( let (return_ptr, returns) = fn_abi.ret.get_abi_return(tcx);
tcx, // Sometimes the first param is an pointer to the place where the return value needs to be stored.
tcx.layout_of(ParamEnv::reveal_all().and(output)).unwrap(), let params: Vec<_> = return_ptr.into_iter().chain(inputs).collect();
) {
PassMode::NoPass => (inputs.collect(), vec![]),
PassMode::ByVal(ret_ty) => (inputs.collect(), vec![AbiParam::new(ret_ty)]),
PassMode::ByValPair(ret_ty_a, ret_ty_b) => (
inputs.collect(),
vec![AbiParam::new(ret_ty_a), AbiParam::new(ret_ty_b)],
),
PassMode::ByRef { size: Some(_) } => {
(
Some(pointer_ty(tcx)) // First param is place to put return val
.into_iter()
.map(|ty| AbiParam::special(ty, ArgumentPurpose::StructReturn))
.chain(inputs)
.collect(),
vec![],
)
}
PassMode::ByRef { size: None } => todo!(),
};
if requires_caller_location {
params.push(AbiParam::new(pointer_ty(tcx)));
}
Signature { Signature {
params, params,
@ -207,30 +58,17 @@ fn clif_sig_from_fn_sig<'tcx>(
} }
} }
pub(crate) fn get_function_name_and_sig<'tcx>( pub(crate) fn get_function_sig<'tcx>(
tcx: TyCtxt<'tcx>, tcx: TyCtxt<'tcx>,
triple: &target_lexicon::Triple, triple: &target_lexicon::Triple,
inst: Instance<'tcx>, inst: Instance<'tcx>,
support_vararg: bool, ) -> Signature {
) -> (String, Signature) {
assert!(!inst.substs.needs_infer()); assert!(!inst.substs.needs_infer());
let fn_sig = tcx clif_sig_from_fn_abi(
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_sig_for_fn_abi(tcx, inst));
if fn_sig.c_variadic && !support_vararg {
tcx.sess.span_fatal(
tcx.def_span(inst.def_id()),
"Variadic function definitions are not yet supported",
);
}
let sig = clif_sig_from_fn_sig(
tcx, tcx,
triple, triple,
fn_sig, &FnAbi::of_instance(&RevealAllLayoutCx(tcx), inst, &[]),
tcx.def_span(inst.def_id()), )
false,
inst.def.requires_caller_location(tcx),
);
(tcx.symbol_name(inst).name.to_string(), sig)
} }
/// Instance must be monomorphized /// Instance must be monomorphized
@ -239,7 +77,8 @@ pub(crate) fn import_function<'tcx>(
     module: &mut impl Module,
     inst: Instance<'tcx>,
 ) -> FuncId {
-    let (name, sig) = get_function_name_and_sig(tcx, module.isa().triple(), inst, true);
+    let name = tcx.symbol_name(inst).name.to_string();
+    let sig = get_function_sig(tcx, module.isa().triple(), inst);
     module
         .declare_function(&name, Linkage::Import, &sig)
         .unwrap()
@ -263,13 +102,13 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
     pub(crate) fn lib_call(
         &mut self,
         name: &str,
-        input_tys: Vec<types::Type>,
-        output_tys: Vec<types::Type>,
+        params: Vec<AbiParam>,
+        returns: Vec<AbiParam>,
         args: &[Value],
     ) -> &[Value] {
         let sig = Signature {
-            params: input_tys.iter().cloned().map(AbiParam::new).collect(),
-            returns: output_tys.iter().cloned().map(AbiParam::new).collect(),
+            params,
+            returns,
             call_conv: CallConv::triple_default(self.triple()),
         };
         let func_id = self
@ -301,16 +140,18 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
             .iter()
             .map(|arg| {
                 (
-                    self.clif_type(arg.layout().ty).unwrap(),
+                    AbiParam::new(self.clif_type(arg.layout().ty).unwrap()),
                     arg.load_scalar(self),
                 )
             })
             .unzip();
         let return_layout = self.layout_of(return_ty);
         let return_tys = if let ty::Tuple(tup) = return_ty.kind() {
-            tup.types().map(|ty| self.clif_type(ty).unwrap()).collect()
+            tup.types()
+                .map(|ty| AbiParam::new(self.clif_type(ty).unwrap()))
+                .collect()
         } else {
-            vec![self.clif_type(return_ty).unwrap()]
+            vec![AbiParam::new(self.clif_type(return_ty).unwrap())]
         };
         let ret_vals = self.lib_call(name, input_tys, return_tys, &args);
         match *ret_vals {
@ -352,12 +193,25 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
     fx: &mut FunctionCx<'_, 'tcx, impl Module>,
     start_block: Block,
 ) {
+    fx.bcx.append_block_params_for_function_params(start_block);
+    fx.bcx.switch_to_block(start_block);
+    fx.bcx.ins().nop();
+
     let ssa_analyzed = crate::analyze::analyze(fx);

     #[cfg(debug_assertions)]
     self::comments::add_args_header_comment(fx);

-    let ret_place = self::returning::codegen_return_param(fx, &ssa_analyzed, start_block);
+    let mut block_params_iter = fx
+        .bcx
+        .func
+        .dfg
+        .block_params(start_block)
+        .to_vec()
+        .into_iter();
+    let ret_place =
+        self::returning::codegen_return_param(fx, &ssa_analyzed, &mut block_params_iter);
     assert_eq!(fx.local_map.push(ret_place), RETURN_PLACE);

     // None means pass_mode == NoPass
@ -366,6 +220,9 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
         Spread(Vec<Option<CValue<'tcx>>>),
     }

+    let fn_abi = fx.fn_abi.take().unwrap();
+    let mut arg_abis_iter = fn_abi.args.iter();
+
     let func_params = fx
         .mir
         .args_iter()
@ -385,14 +242,18 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
             };

             let mut params = Vec::new();
-            for (i, arg_ty) in tupled_arg_tys.types().enumerate() {
-                let param = cvalue_for_param(fx, start_block, Some(local), Some(i), arg_ty);
+            for (i, _arg_ty) in tupled_arg_tys.types().enumerate() {
+                let arg_abi = arg_abis_iter.next().unwrap();
+                let param =
+                    cvalue_for_param(fx, Some(local), Some(i), arg_abi, &mut block_params_iter);
                 params.push(param);
             }

             (local, ArgKind::Spread(params), arg_ty)
         } else {
-            let param = cvalue_for_param(fx, start_block, Some(local), None, arg_ty);
+            let arg_abi = arg_abis_iter.next().unwrap();
+            let param =
+                cvalue_for_param(fx, Some(local), None, arg_abi, &mut block_params_iter);
             (local, ArgKind::Normal(param), arg_ty)
         }
     })
@ -401,13 +262,14 @@ pub(crate) fn codegen_fn_prelude<'tcx>(
     assert!(fx.caller_location.is_none());
     if fx.instance.def.requires_caller_location(fx.tcx) {
         // Store caller location for `#[track_caller]`.
-        fx.caller_location = Some(
-            cvalue_for_param(fx, start_block, None, None, fx.tcx.caller_location_ty()).unwrap(),
-        );
+        let arg_abi = arg_abis_iter.next().unwrap();
+        fx.caller_location =
+            Some(cvalue_for_param(fx, None, None, arg_abi, &mut block_params_iter).unwrap());
     }

-    fx.bcx.switch_to_block(start_block);
-    fx.bcx.ins().nop();
+    assert!(arg_abis_iter.next().is_none(), "ArgAbi left behind");
+    fx.fn_abi = Some(fn_abi);
+    assert!(block_params_iter.next().is_none(), "arg_value left behind");

     #[cfg(debug_assertions)]
     self::comments::add_locals_header_comment(fx);
@ -533,6 +395,21 @@ pub(crate) fn codegen_terminator_call<'tcx>(
         None
     };

+    let extra_args = &args[fn_sig.inputs().len()..];
+    let extra_args = extra_args
+        .iter()
+        .map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
+        .collect::<Vec<_>>();
+    let fn_abi = if let Some(instance) = instance {
+        FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
+    } else {
+        FnAbi::of_fn_ptr(
+            &RevealAllLayoutCx(fx.tcx),
+            fn_ty.fn_sig(fx.tcx),
+            &extra_args,
+        )
+    };
+
     let is_cold = instance
         .map(|inst| {
             fx.tcx
@ -570,8 +447,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
     //   | indirect call target
     //   |         | the first argument to be passed
-    //   v         v          v virtual calls are special cased below
-    let (func_ref, first_arg, is_virtual_call) = match instance {
+    //   v         v
+    let (func_ref, first_arg) = match instance {
         // Trait object call
         Some(Instance {
             def: InstanceDef::Virtual(_, idx),
@ -582,23 +459,19 @@ pub(crate) fn codegen_terminator_call<'tcx>(
                 let nop_inst = fx.bcx.ins().nop();
                 fx.add_comment(
                     nop_inst,
-                    format!(
-                        "virtual call; self arg pass mode: {:?}",
-                        get_pass_mode(fx.tcx, args[0].layout())
-                    ),
+                    format!("virtual call; self arg pass mode: {:?}", &fn_abi.args[0],),
                 );
             }
             let (ptr, method) = crate::vtable::get_ptr_and_method_ref(fx, args[0], idx);
-            (Some(method), Single(ptr), true)
+            (Some(method), smallvec![ptr])
         }

         // Normal call
         Some(_) => (
             None,
             args.get(0)
-                .map(|arg| adjust_arg_for_abi(fx, *arg))
-                .unwrap_or(Empty),
-            false,
+                .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
+                .unwrap_or(smallvec![]),
         ),

         // Indirect call
@ -612,23 +485,27 @@ pub(crate) fn codegen_terminator_call<'tcx>(
             (
                 Some(func),
                 args.get(0)
-                    .map(|arg| adjust_arg_for_abi(fx, *arg))
-                    .unwrap_or(Empty),
-                false,
+                    .map(|arg| adjust_arg_for_abi(fx, *arg, &fn_abi.args[0]))
+                    .unwrap_or(smallvec![]),
             )
         }
     };

     let ret_place = destination.map(|(place, _)| place);
-    let (call_inst, call_args) =
-        self::returning::codegen_with_call_return_arg(fx, fn_sig, ret_place, |fx, return_ptr| {
+    let (call_inst, call_args) = self::returning::codegen_with_call_return_arg(
+        fx,
+        &fn_abi.ret,
+        ret_place,
+        |fx, return_ptr| {
+            let regular_args_count = args.len();
             let mut call_args: Vec<Value> = return_ptr
                 .into_iter()
                 .chain(first_arg.into_iter())
                 .chain(
                     args.into_iter()
+                        .enumerate()
                         .skip(1)
-                        .map(|arg| adjust_arg_for_abi(fx, arg).into_iter())
+                        .map(|(i, arg)| adjust_arg_for_abi(fx, arg, &fn_abi.args[i]).into_iter())
                         .flatten(),
                 )
                 .collect::<Vec<_>>();
@ -639,18 +516,17 @@ pub(crate) fn codegen_terminator_call<'tcx>(
             {
                 // Pass the caller location for `#[track_caller]`.
                 let caller_location = fx.get_caller_location(span);
-                call_args.extend(adjust_arg_for_abi(fx, caller_location).into_iter());
+                call_args.extend(
+                    adjust_arg_for_abi(fx, caller_location, &fn_abi.args[regular_args_count])
+                        .into_iter(),
+                );
+                assert_eq!(fn_abi.args.len(), regular_args_count + 1);
+            } else {
+                assert_eq!(fn_abi.args.len(), regular_args_count);
             }

             let call_inst = if let Some(func_ref) = func_ref {
-                let sig = clif_sig_from_fn_sig(
-                    fx.tcx,
-                    fx.triple(),
-                    fn_sig,
-                    span,
-                    is_virtual_call,
-                    false, // calls through function pointers never pass the caller location
-                );
+                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                 let sig = fx.bcx.import_signature(sig);
                 fx.bcx.ins().call_indirect(sig, func_ref, &call_args)
             } else {
@ -660,7 +536,8 @@ pub(crate) fn codegen_terminator_call<'tcx>(
             };

             (call_inst, call_args)
-        });
+        },
+    );

     // FIXME find a cleaner way to support varargs
     if fn_sig.c_variadic {
@ -701,37 +578,33 @@ pub(crate) fn codegen_drop<'tcx>(
     drop_place: CPlace<'tcx>,
 ) {
     let ty = drop_place.layout().ty;
-    let drop_fn = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);
+    let drop_instance = Instance::resolve_drop_in_place(fx.tcx, ty).polymorphize(fx.tcx);

-    if let ty::InstanceDef::DropGlue(_, None) = drop_fn.def {
+    if let ty::InstanceDef::DropGlue(_, None) = drop_instance.def {
         // we don't actually need to drop anything
     } else {
-        let drop_fn_ty = drop_fn.ty(fx.tcx, ParamEnv::reveal_all());
-        let fn_sig = fx.tcx.normalize_erasing_late_bound_regions(
-            ParamEnv::reveal_all(),
-            drop_fn_ty.fn_sig(fx.tcx),
-        );
-        assert_eq!(fn_sig.output(), fx.tcx.mk_unit());
-
         match ty.kind() {
             ty::Dynamic(..) => {
                 let (ptr, vtable) = drop_place.to_ptr_maybe_unsized();
                 let ptr = ptr.get_addr(fx);
                 let drop_fn = crate::vtable::drop_fn_of_obj(fx, vtable.unwrap());

-                let sig = clif_sig_from_fn_sig(
-                    fx.tcx,
-                    fx.triple(),
-                    fn_sig,
-                    span,
-                    true,
-                    false, // `drop_in_place` is never `#[track_caller]`
-                );
+                // FIXME(eddyb) perhaps move some of this logic into
+                // `Instance::resolve_drop_in_place`?
+                let virtual_drop = Instance {
+                    def: ty::InstanceDef::Virtual(drop_instance.def_id(), 0),
+                    substs: drop_instance.substs,
+                };
+                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), virtual_drop, &[]);
+
+                let sig = clif_sig_from_fn_abi(fx.tcx, fx.triple(), &fn_abi);
                 let sig = fx.bcx.import_signature(sig);
                 fx.bcx.ins().call_indirect(sig, drop_fn, &[ptr]);
             }
             _ => {
-                assert!(!matches!(drop_fn.def, InstanceDef::Virtual(_, _)));
+                assert!(!matches!(drop_instance.def, InstanceDef::Virtual(_, _)));
+
+                let fn_abi = FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), drop_instance, &[]);

                 let arg_value = drop_place.place_ref(
                     fx,
@ -743,17 +616,19 @@ pub(crate) fn codegen_drop<'tcx>(
                     },
                 )),
                 );
-                let arg_value = adjust_arg_for_abi(fx, arg_value);
+                let arg_value = adjust_arg_for_abi(fx, arg_value, &fn_abi.args[0]);

                 let mut call_args: Vec<Value> = arg_value.into_iter().collect::<Vec<_>>();

-                if drop_fn.def.requires_caller_location(fx.tcx) {
+                if drop_instance.def.requires_caller_location(fx.tcx) {
                     // Pass the caller location for `#[track_caller]`.
                     let caller_location = fx.get_caller_location(span);
-                    call_args.extend(adjust_arg_for_abi(fx, caller_location).into_iter());
+                    call_args.extend(
+                        adjust_arg_for_abi(fx, caller_location, &fn_abi.args[1]).into_iter(),
+                    );
                 }

-                let func_ref = fx.get_function_ref(drop_fn);
+                let func_ref = fx.get_function_ref(drop_instance);
                 fx.bcx.ins().call(func_ref, &call_args);
             }
         }


@ -1,140 +1,281 @@
//! Argument passing //! Argument passing
use crate::prelude::*; use crate::prelude::*;
use crate::value_and_place::assert_assignable;
pub(super) use EmptySinglePair::*; use cranelift_codegen::ir::{ArgumentExtension, ArgumentPurpose};
use rustc_target::abi::call::{
ArgAbi, ArgAttributes, ArgExtension as RustcArgExtension, CastTarget, PassMode, Reg, RegKind,
};
use smallvec::{smallvec, SmallVec};
#[derive(Copy, Clone, Debug)] pub(super) trait ArgAbiExt<'tcx> {
pub(super) enum PassMode { fn get_abi_param(&self, tcx: TyCtxt<'tcx>) -> SmallVec<[AbiParam; 2]>;
NoPass, fn get_abi_return(&self, tcx: TyCtxt<'tcx>) -> (Option<AbiParam>, Vec<AbiParam>);
ByVal(Type),
ByValPair(Type, Type),
ByRef { size: Option<Size> },
} }
#[derive(Copy, Clone, Debug)] fn reg_to_abi_param(reg: Reg) -> AbiParam {
pub(super) enum EmptySinglePair<T> { let clif_ty = match (reg.kind, reg.size.bytes()) {
Empty, (RegKind::Integer, 1) => types::I8,
Single(T), (RegKind::Integer, 2) => types::I16,
Pair(T, T), (RegKind::Integer, 4) => types::I32,
(RegKind::Integer, 8) => types::I64,
(RegKind::Integer, 16) => types::I128,
(RegKind::Float, 4) => types::F32,
(RegKind::Float, 8) => types::F64,
(RegKind::Vector, size) => types::I8.by(u16::try_from(size).unwrap()).unwrap(),
_ => unreachable!("{:?}", reg),
};
AbiParam::new(clif_ty)
} }
impl<T> EmptySinglePair<T> { fn apply_arg_attrs_to_abi_param(mut param: AbiParam, arg_attrs: ArgAttributes) -> AbiParam {
pub(super) fn into_iter(self) -> EmptySinglePairIter<T> { match arg_attrs.arg_ext {
EmptySinglePairIter(self) RustcArgExtension::None => {}
} RustcArgExtension::Zext => param.extension = ArgumentExtension::Uext,
RustcArgExtension::Sext => param.extension = ArgumentExtension::Sext,
pub(super) fn map<U>(self, mut f: impl FnMut(T) -> U) -> EmptySinglePair<U> {
match self {
Empty => Empty,
Single(v) => Single(f(v)),
Pair(a, b) => Pair(f(a), f(b)),
}
} }
param
} }
pub(super) struct EmptySinglePairIter<T>(EmptySinglePair<T>); fn cast_target_to_abi_params(cast: CastTarget) -> SmallVec<[AbiParam; 2]> {
let (rest_count, rem_bytes) = if cast.rest.unit.size.bytes() == 0 {
impl<T> Iterator for EmptySinglePairIter<T> { (0, 0)
type Item = T;
fn next(&mut self) -> Option<T> {
match std::mem::replace(&mut self.0, Empty) {
Empty => None,
Single(v) => Some(v),
Pair(a, b) => {
self.0 = Single(b);
Some(a)
}
}
}
}
impl<T: std::fmt::Debug> EmptySinglePair<T> {
pub(super) fn assert_single(self) -> T {
match self {
Single(v) => v,
_ => panic!("Called assert_single on {:?}", self),
}
}
pub(super) fn assert_pair(self) -> (T, T) {
match self {
Pair(a, b) => (a, b),
_ => panic!("Called assert_pair on {:?}", self),
}
}
}
impl PassMode {
pub(super) fn get_param_ty(self, tcx: TyCtxt<'_>) -> EmptySinglePair<Type> {
match self {
PassMode::NoPass => Empty,
PassMode::ByVal(clif_type) => Single(clif_type),
PassMode::ByValPair(a, b) => Pair(a, b),
PassMode::ByRef { size: Some(_) } => Single(pointer_ty(tcx)),
PassMode::ByRef { size: None } => Pair(pointer_ty(tcx), pointer_ty(tcx)),
}
}
}
pub(super) fn get_pass_mode<'tcx>(tcx: TyCtxt<'tcx>, layout: TyAndLayout<'tcx>) -> PassMode {
if layout.is_zst() {
// WARNING zst arguments must never be passed, as that will break CastKind::ClosureFnPointer
PassMode::NoPass
} else { } else {
match &layout.abi { (
Abi::Uninhabited => PassMode::NoPass, cast.rest.total.bytes() / cast.rest.unit.size.bytes(),
Abi::Scalar(scalar) => PassMode::ByVal(scalar_to_clif_type(tcx, scalar.clone())), cast.rest.total.bytes() % cast.rest.unit.size.bytes(),
Abi::ScalarPair(a, b) => { )
let a = scalar_to_clif_type(tcx, a.clone()); };
let b = scalar_to_clif_type(tcx, b.clone());
if a == types::I128 && b == types::I128 {
// Returning (i128, i128) by-val-pair would take 4 regs, while only 3 are
// available on x86_64. Cranelift gets confused when too many return params
// are used.
PassMode::ByRef {
size: Some(layout.size),
}
} else {
PassMode::ByValPair(a, b)
}
}
// FIXME implement Vector Abi in a cg_llvm compatible way if cast.prefix.iter().all(|x| x.is_none()) {
Abi::Vector { .. } => { // Simplify to a single unit when there is no prefix and size <= unit size
if let Some(vector_ty) = crate::intrinsics::clif_vector_type(tcx, layout) { if cast.rest.total <= cast.rest.unit.size {
PassMode::ByVal(vector_ty) let clif_ty = match (cast.rest.unit.kind, cast.rest.unit.size.bytes()) {
} else { (RegKind::Integer, 1) => types::I8,
PassMode::ByRef { (RegKind::Integer, 2) => types::I16,
size: Some(layout.size), (RegKind::Integer, 3..=4) => types::I32,
} (RegKind::Integer, 5..=8) => types::I64,
} (RegKind::Integer, 9..=16) => types::I128,
} (RegKind::Float, 4) => types::F32,
(RegKind::Float, 8) => types::F64,
Abi::Aggregate { sized: true } => PassMode::ByRef { (RegKind::Vector, size) => types::I8.by(u16::try_from(size).unwrap()).unwrap(),
size: Some(layout.size), _ => unreachable!("{:?}", cast.rest.unit),
}, };
Abi::Aggregate { sized: false } => PassMode::ByRef { size: None }, return smallvec![AbiParam::new(clif_ty)];
} }
} }
// Create list of fields in the main structure
let mut args = cast
.prefix
.iter()
.flatten()
.map(|&kind| {
reg_to_abi_param(Reg {
kind,
size: cast.prefix_chunk_size,
})
})
.chain((0..rest_count).map(|_| reg_to_abi_param(cast.rest.unit)))
.collect::<SmallVec<_>>();
// Append final integer
if rem_bytes != 0 {
// Only integers can be really split further.
assert_eq!(cast.rest.unit.kind, RegKind::Integer);
args.push(reg_to_abi_param(Reg {
kind: RegKind::Integer,
size: Size::from_bytes(rem_bytes),
}));
}
args
}
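For reference, the unit/remainder arithmetic used above (a `total`-byte blob passed in `unit_size`-byte registers becomes whole register units plus one trailing integer) can be sketched as a standalone snippet; split_into_units is a hypothetical helper name, not part of cg_clif.

// Standalone sketch (not cg_clif code) of the split performed by
// cast_target_to_abi_params: full units first, then the byte remainder
// that is appended as a final, narrower integer parameter.
fn split_into_units(total: u64, unit_size: u64) -> (u64, u64) {
    if unit_size == 0 {
        (0, 0)
    } else {
        (total / unit_size, total % unit_size)
    }
}

fn main() {
    // e.g. a 20-byte aggregate cast to 8-byte integer units:
    // two full i64 pieces plus a trailing 4-byte integer.
    assert_eq!(split_into_units(20, 8), (2, 4));
    println!("{:?}", split_into_units(20, 8));
}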
impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
fn get_abi_param(&self, tcx: TyCtxt<'tcx>) -> SmallVec<[AbiParam; 2]> {
match self.mode {
PassMode::Ignore => smallvec![],
PassMode::Direct(attrs) => match &self.layout.abi {
Abi::Scalar(scalar) => {
smallvec![apply_arg_attrs_to_abi_param(
AbiParam::new(scalar_to_clif_type(tcx, scalar.clone())),
attrs
)]
}
Abi::Vector { .. } => {
let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout).unwrap();
smallvec![AbiParam::new(vector_ty)]
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Pair(attrs_a, attrs_b) => match &self.layout.abi {
Abi::ScalarPair(a, b) => {
let a = scalar_to_clif_type(tcx, a.clone());
let b = scalar_to_clif_type(tcx, b.clone());
smallvec![
apply_arg_attrs_to_abi_param(AbiParam::new(a), attrs_a),
apply_arg_attrs_to_abi_param(AbiParam::new(b), attrs_b),
]
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Cast(cast) => cast_target_to_abi_params(cast),
PassMode::Indirect {
attrs,
extra_attrs: None,
on_stack,
} => {
if on_stack {
let size = u32::try_from(self.layout.size.bytes()).unwrap();
smallvec![apply_arg_attrs_to_abi_param(
AbiParam::special(pointer_ty(tcx), ArgumentPurpose::StructArgument(size),),
attrs
)]
} else {
smallvec![apply_arg_attrs_to_abi_param(
AbiParam::new(pointer_ty(tcx)),
attrs
)]
}
}
PassMode::Indirect {
attrs,
extra_attrs: Some(extra_attrs),
on_stack,
} => {
assert!(!on_stack);
smallvec![
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), attrs),
apply_arg_attrs_to_abi_param(AbiParam::new(pointer_ty(tcx)), extra_attrs),
]
}
}
}
fn get_abi_return(&self, tcx: TyCtxt<'tcx>) -> (Option<AbiParam>, Vec<AbiParam>) {
match self.mode {
PassMode::Ignore => (None, vec![]),
PassMode::Direct(_) => match &self.layout.abi {
Abi::Scalar(scalar) => (
None,
vec![AbiParam::new(scalar_to_clif_type(tcx, scalar.clone()))],
),
Abi::Vector { .. } => {
let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout).unwrap();
(None, vec![AbiParam::new(vector_ty)])
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Pair(_, _) => match &self.layout.abi {
Abi::ScalarPair(a, b) => {
let a = scalar_to_clif_type(tcx, a.clone());
let b = scalar_to_clif_type(tcx, b.clone());
(None, vec![AbiParam::new(a), AbiParam::new(b)])
}
_ => unreachable!("{:?}", self.layout.abi),
},
PassMode::Cast(cast) => (None, cast_target_to_abi_params(cast).into_iter().collect()),
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack,
} => {
assert!(!on_stack);
(
Some(AbiParam::special(
pointer_ty(tcx),
ArgumentPurpose::StructReturn,
)),
vec![],
)
}
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
}
}
}
pub(super) fn to_casted_value<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
arg: CValue<'tcx>,
cast: CastTarget,
) -> SmallVec<[Value; 2]> {
let (ptr, meta) = arg.force_stack(fx);
assert!(meta.is_none());
let mut offset = 0;
cast_target_to_abi_params(cast)
.into_iter()
.map(|param| {
let val = ptr
.offset_i64(fx, offset)
.load(fx, param.value_type, MemFlags::new());
offset += i64::from(param.value_type.bytes());
val
})
.collect()
}
pub(super) fn from_casted_value<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>,
block_params: &[Value],
layout: TyAndLayout<'tcx>,
cast: CastTarget,
) -> CValue<'tcx> {
let abi_params = cast_target_to_abi_params(cast);
let abi_param_size: u32 = abi_params
.iter()
.map(|param| param.value_type.bytes())
.sum();
let layout_size = u32::try_from(layout.size.bytes()).unwrap();
let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
kind: StackSlotKind::ExplicitSlot,
// FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
// Stack slot size may be bigger for for example `[u8; 3]` which is packed into an `i32`.
// It may also be smaller for example when the type is a wrapper around an integer with a
// larger alignment than the integer.
size: (std::cmp::max(abi_param_size, layout_size) + 15) / 16 * 16,
offset: None,
});
let ptr = Pointer::new(fx.bcx.ins().stack_addr(pointer_ty(fx.tcx), stack_slot, 0));
let mut offset = 0;
let mut block_params_iter = block_params.into_iter().copied();
for param in abi_params {
let val = ptr.offset_i64(fx, offset).store(
fx,
block_params_iter.next().unwrap(),
MemFlags::new(),
);
offset += i64::from(param.value_type.bytes());
val
}
assert_eq!(block_params_iter.next(), None, "Leftover block param");
CValue::by_ref(ptr, layout)
} }
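The 16-byte rounding mentioned in the FIXME above (and reused for the other stack slots touched by this commit) reduces to a small formula; round_up_to_16 below is a hypothetical standalone sketch, not cg_clif code.

// Standalone sketch of the stack-slot size rounding: sizes are bumped to the
// next multiple of 16 because Cranelift does not yet let the caller specify
// stack slot alignment.
fn round_up_to_16(size: u32) -> u32 {
    (size + 15) / 16 * 16
}

fn main() {
    assert_eq!(round_up_to_16(1), 16);
    assert_eq!(round_up_to_16(16), 16);
    assert_eq!(round_up_to_16(20), 32); // e.g. an i128 piece plus an i32 piece
    println!("ok");
}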
/// Get a set of values to be passed as function arguments. /// Get a set of values to be passed as function arguments.
pub(super) fn adjust_arg_for_abi<'tcx>( pub(super) fn adjust_arg_for_abi<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>, fx: &mut FunctionCx<'_, 'tcx, impl Module>,
arg: CValue<'tcx>, arg: CValue<'tcx>,
) -> EmptySinglePair<Value> { arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
match get_pass_mode(fx.tcx, arg.layout()) { ) -> SmallVec<[Value; 2]> {
PassMode::NoPass => Empty, assert_assignable(fx, arg.layout().ty, arg_abi.layout.ty);
PassMode::ByVal(_) => Single(arg.load_scalar(fx)), match arg_abi.mode {
PassMode::ByValPair(_, _) => { PassMode::Ignore => smallvec![],
PassMode::Direct(_) => smallvec![arg.load_scalar(fx)],
PassMode::Pair(_, _) => {
let (a, b) = arg.load_scalar_pair(fx); let (a, b) = arg.load_scalar_pair(fx);
Pair(a, b) smallvec![a, b]
} }
PassMode::ByRef { size: _ } => match arg.force_stack(fx) { PassMode::Cast(cast) => to_casted_value(fx, arg, cast),
(ptr, None) => Single(ptr.get_addr(fx)), PassMode::Indirect { .. } => match arg.force_stack(fx) {
(ptr, Some(meta)) => Pair(ptr.get_addr(fx), meta), (ptr, None) => smallvec![ptr.get_addr(fx)],
(ptr, Some(meta)) => smallvec![ptr.get_addr(fx), meta],
}, },
} }
} }
@ -143,20 +284,23 @@ pub(super) fn adjust_arg_for_abi<'tcx>(
/// as necessary. /// as necessary.
pub(super) fn cvalue_for_param<'tcx>( pub(super) fn cvalue_for_param<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>, fx: &mut FunctionCx<'_, 'tcx, impl Module>,
start_block: Block,
#[cfg_attr(not(debug_assertions), allow(unused_variables))] local: Option<mir::Local>, #[cfg_attr(not(debug_assertions), allow(unused_variables))] local: Option<mir::Local>,
#[cfg_attr(not(debug_assertions), allow(unused_variables))] local_field: Option<usize>, #[cfg_attr(not(debug_assertions), allow(unused_variables))] local_field: Option<usize>,
arg_ty: Ty<'tcx>, arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
block_params_iter: &mut impl Iterator<Item = Value>,
) -> Option<CValue<'tcx>> { ) -> Option<CValue<'tcx>> {
let layout = fx.layout_of(arg_ty); let block_params = arg_abi
let pass_mode = get_pass_mode(fx.tcx, layout); .get_abi_param(fx.tcx)
.into_iter()
if let PassMode::NoPass = pass_mode { .map(|abi_param| {
return None; let block_param = block_params_iter.next().unwrap();
} assert_eq!(
fx.bcx.func.dfg.value_type(block_param),
let clif_types = pass_mode.get_param_ty(fx.tcx); abi_param.value_type
let block_params = clif_types.map(|t| fx.bcx.append_block_param(start_block, t)); );
block_param
})
.collect::<SmallVec<[_; 2]>>();
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
crate::abi::comments::add_arg_comment( crate::abi::comments::add_arg_comment(
@ -164,25 +308,48 @@ pub(super) fn cvalue_for_param<'tcx>(
"arg", "arg",
local, local,
local_field, local_field,
block_params, &block_params,
pass_mode, arg_abi.mode,
arg_ty, arg_abi.layout,
); );
match pass_mode { match arg_abi.mode {
PassMode::NoPass => unreachable!(), PassMode::Ignore => None,
PassMode::ByVal(_) => Some(CValue::by_val(block_params.assert_single(), layout)), PassMode::Direct(_) => {
PassMode::ByValPair(_, _) => { assert_eq!(block_params.len(), 1, "{:?}", block_params);
let (a, b) = block_params.assert_pair(); Some(CValue::by_val(block_params[0], arg_abi.layout))
Some(CValue::by_val_pair(a, b, layout))
} }
PassMode::ByRef { size: Some(_) } => Some(CValue::by_ref( PassMode::Pair(_, _) => {
Pointer::new(block_params.assert_single()), assert_eq!(block_params.len(), 2, "{:?}", block_params);
layout, Some(CValue::by_val_pair(
)), block_params[0],
PassMode::ByRef { size: None } => { block_params[1],
let (ptr, meta) = block_params.assert_pair(); arg_abi.layout,
Some(CValue::by_ref_unsized(Pointer::new(ptr), meta, layout)) ))
}
PassMode::Cast(cast) => Some(from_casted_value(fx, &block_params, arg_abi.layout, cast)),
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => {
assert_eq!(block_params.len(), 1, "{:?}", block_params);
Some(CValue::by_ref(
Pointer::new(block_params[0]),
arg_abi.layout,
))
}
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => {
assert_eq!(block_params.len(), 2, "{:?}", block_params);
Some(CValue::by_ref_unsized(
Pointer::new(block_params[0]),
block_params[1],
arg_abi.layout,
))
} }
} }
} }


@ -1,21 +1,57 @@
//! Return value handling //! Return value handling
use crate::abi::pass_mode::*;
use crate::prelude::*; use crate::prelude::*;
fn return_layout<'a, 'tcx>(fx: &mut FunctionCx<'a, 'tcx, impl Module>) -> TyAndLayout<'tcx> { use rustc_middle::ty::layout::FnAbiExt;
fx.layout_of(fx.monomorphize(&fx.mir.local_decls[RETURN_PLACE].ty)) use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
} use smallvec::{smallvec, SmallVec};
/// Can the given type be returned into an ssa var or does it need to be returned on the stack. /// Can the given type be returned into an ssa var or does it need to be returned on the stack.
pub(crate) fn can_return_to_ssa_var<'tcx>( pub(crate) fn can_return_to_ssa_var<'tcx>(
tcx: TyCtxt<'tcx>, fx: &FunctionCx<'_, 'tcx, impl Module>,
dest_layout: TyAndLayout<'tcx>, func: &mir::Operand<'tcx>,
args: &[mir::Operand<'tcx>],
) -> bool { ) -> bool {
match get_pass_mode(tcx, dest_layout) { let fn_ty = fx.monomorphize(func.ty(fx.mir, fx.tcx));
PassMode::NoPass | PassMode::ByVal(_) | PassMode::ByValPair(_, _) => true, let fn_sig = fx
// FIXME Make it possible to return ByRef to an ssa var. .tcx
PassMode::ByRef { size: _ } => false, .normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), fn_ty.fn_sig(fx.tcx));
// Handle special calls like instrinsics and empty drop glue.
let instance = if let ty::FnDef(def_id, substs) = *fn_ty.kind() {
let instance = ty::Instance::resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, substs)
.unwrap()
.unwrap()
.polymorphize(fx.tcx);
match instance.def {
InstanceDef::Intrinsic(_) | InstanceDef::DropGlue(_, _) => {
return true;
}
_ => Some(instance),
}
} else {
None
};
let extra_args = &args[fn_sig.inputs().len()..];
let extra_args = extra_args
.iter()
.map(|op_arg| fx.monomorphize(op_arg.ty(fx.mir, fx.tcx)))
.collect::<Vec<_>>();
let fn_abi = if let Some(instance) = instance {
FnAbi::of_instance(&RevealAllLayoutCx(fx.tcx), instance, &extra_args)
} else {
FnAbi::of_fn_ptr(
&RevealAllLayoutCx(fx.tcx),
fn_ty.fn_sig(fx.tcx),
&extra_args,
)
};
match fn_abi.ret.mode {
PassMode::Ignore | PassMode::Direct(_) | PassMode::Pair(_, _) => true,
// FIXME Make it possible to return Cast and Indirect to an ssa var.
PassMode::Cast(_) | PassMode::Indirect { .. } => false,
} }
} }
@ -24,27 +60,45 @@ pub(crate) fn can_return_to_ssa_var<'tcx>(
pub(super) fn codegen_return_param<'tcx>( pub(super) fn codegen_return_param<'tcx>(
fx: &mut FunctionCx<'_, 'tcx, impl Module>, fx: &mut FunctionCx<'_, 'tcx, impl Module>,
ssa_analyzed: &rustc_index::vec::IndexVec<Local, crate::analyze::SsaKind>, ssa_analyzed: &rustc_index::vec::IndexVec<Local, crate::analyze::SsaKind>,
start_block: Block, block_params_iter: &mut impl Iterator<Item = Value>,
) -> CPlace<'tcx> { ) -> CPlace<'tcx> {
let ret_layout = return_layout(fx); let (ret_place, ret_param): (_, SmallVec<[_; 2]>) = match fx.fn_abi.as_ref().unwrap().ret.mode {
let ret_pass_mode = get_pass_mode(fx.tcx, ret_layout); PassMode::Ignore => (
let (ret_place, ret_param) = match ret_pass_mode { CPlace::no_place(fx.fn_abi.as_ref().unwrap().ret.layout),
PassMode::NoPass => (CPlace::no_place(ret_layout), Empty), smallvec![],
PassMode::ByVal(_) | PassMode::ByValPair(_, _) => { ),
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => {
let is_ssa = ssa_analyzed[RETURN_PLACE] == crate::analyze::SsaKind::Ssa; let is_ssa = ssa_analyzed[RETURN_PLACE] == crate::analyze::SsaKind::Ssa;
( (
super::make_local_place(fx, RETURN_PLACE, ret_layout, is_ssa), super::make_local_place(
Empty, fx,
RETURN_PLACE,
fx.fn_abi.as_ref().unwrap().ret.layout,
is_ssa,
),
smallvec![],
) )
} }
PassMode::ByRef { size: Some(_) } => { PassMode::Indirect {
let ret_param = fx.bcx.append_block_param(start_block, fx.pointer_type); attrs: _,
extra_attrs: None,
on_stack: _,
} => {
let ret_param = block_params_iter.next().unwrap();
assert_eq!(fx.bcx.func.dfg.value_type(ret_param), pointer_ty(fx.tcx));
( (
CPlace::for_ptr(Pointer::new(ret_param), ret_layout), CPlace::for_ptr(
Single(ret_param), Pointer::new(ret_param),
fx.fn_abi.as_ref().unwrap().ret.layout,
),
smallvec![ret_param],
) )
} }
PassMode::ByRef { size: None } => todo!(), PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
}; };
#[cfg(not(debug_assertions))] #[cfg(not(debug_assertions))]
@ -56,9 +110,9 @@ pub(super) fn codegen_return_param<'tcx>(
"ret", "ret",
Some(RETURN_PLACE), Some(RETURN_PLACE),
None, None,
ret_param, &ret_param,
ret_pass_mode, fx.fn_abi.as_ref().unwrap().ret.mode,
ret_layout.ty, fx.fn_abi.as_ref().unwrap().ret.layout,
); );
ret_place ret_place
@ -68,42 +122,71 @@ pub(super) fn codegen_return_param<'tcx>(
/// returns the call return value(s) if any are written to the correct place. /// returns the call return value(s) if any are written to the correct place.
pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>( pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>(
fx: &mut FunctionCx<'_, 'tcx, M>, fx: &mut FunctionCx<'_, 'tcx, M>,
fn_sig: FnSig<'tcx>, ret_arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
ret_place: Option<CPlace<'tcx>>, ret_place: Option<CPlace<'tcx>>,
f: impl FnOnce(&mut FunctionCx<'_, 'tcx, M>, Option<Value>) -> (Inst, T), f: impl FnOnce(&mut FunctionCx<'_, 'tcx, M>, Option<Value>) -> (Inst, T),
) -> (Inst, T) { ) -> (Inst, T) {
let ret_layout = fx.layout_of(fn_sig.output()); let return_ptr = match ret_arg_abi.mode {
PassMode::Ignore => None,
let output_pass_mode = get_pass_mode(fx.tcx, ret_layout); PassMode::Indirect {
let return_ptr = match output_pass_mode { attrs: _,
PassMode::NoPass => None, extra_attrs: None,
PassMode::ByRef { size: Some(_) } => match ret_place { on_stack: _,
} => match ret_place {
Some(ret_place) => Some(ret_place.to_ptr().get_addr(fx)), Some(ret_place) => Some(ret_place.to_ptr().get_addr(fx)),
None => Some(fx.bcx.ins().iconst(fx.pointer_type, 43)), // FIXME allocate temp stack slot None => Some(fx.bcx.ins().iconst(fx.pointer_type, 43)), // FIXME allocate temp stack slot
}, },
PassMode::ByRef { size: None } => todo!(), PassMode::Indirect {
PassMode::ByVal(_) | PassMode::ByValPair(_, _) => None, attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
PassMode::Direct(_) | PassMode::Pair(_, _) | PassMode::Cast(_) => None,
}; };
let (call_inst, meta) = f(fx, return_ptr); let (call_inst, meta) = f(fx, return_ptr);
match output_pass_mode { match ret_arg_abi.mode {
PassMode::NoPass => {} PassMode::Ignore => {}
PassMode::ByVal(_) => { PassMode::Direct(_) => {
if let Some(ret_place) = ret_place { if let Some(ret_place) = ret_place {
let ret_val = fx.bcx.inst_results(call_inst)[0]; let ret_val = fx.bcx.inst_results(call_inst)[0];
ret_place.write_cvalue(fx, CValue::by_val(ret_val, ret_layout)); ret_place.write_cvalue(fx, CValue::by_val(ret_val, ret_arg_abi.layout));
} }
} }
PassMode::ByValPair(_, _) => { PassMode::Pair(_, _) => {
if let Some(ret_place) = ret_place { if let Some(ret_place) = ret_place {
let ret_val_a = fx.bcx.inst_results(call_inst)[0]; let ret_val_a = fx.bcx.inst_results(call_inst)[0];
let ret_val_b = fx.bcx.inst_results(call_inst)[1]; let ret_val_b = fx.bcx.inst_results(call_inst)[1];
ret_place.write_cvalue(fx, CValue::by_val_pair(ret_val_a, ret_val_b, ret_layout)); ret_place.write_cvalue(
fx,
CValue::by_val_pair(ret_val_a, ret_val_b, ret_arg_abi.layout),
);
} }
} }
PassMode::ByRef { size: Some(_) } => {} PassMode::Cast(cast) => {
PassMode::ByRef { size: None } => todo!(), if let Some(ret_place) = ret_place {
let results = fx
.bcx
.inst_results(call_inst)
.into_iter()
.copied()
.collect::<SmallVec<[Value; 2]>>();
let result =
super::pass_mode::from_casted_value(fx, &results, ret_place.layout(), cast);
ret_place.write_cvalue(fx, result);
}
}
PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => {}
PassMode::Indirect {
attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
} }
(call_inst, meta) (call_inst, meta)
@ -111,20 +194,35 @@ pub(super) fn codegen_with_call_return_arg<'tcx, M: Module, T>(
/// Codegen a return instruction with the right return value(s) if any. /// Codegen a return instruction with the right return value(s) if any.
pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, impl Module>) { pub(crate) fn codegen_return(fx: &mut FunctionCx<'_, '_, impl Module>) {
match get_pass_mode(fx.tcx, return_layout(fx)) { match fx.fn_abi.as_ref().unwrap().ret.mode {
PassMode::NoPass | PassMode::ByRef { size: Some(_) } => { PassMode::Ignore
| PassMode::Indirect {
attrs: _,
extra_attrs: None,
on_stack: _,
} => {
fx.bcx.ins().return_(&[]); fx.bcx.ins().return_(&[]);
} }
PassMode::ByRef { size: None } => todo!(), PassMode::Indirect {
PassMode::ByVal(_) => { attrs: _,
extra_attrs: Some(_),
on_stack: _,
} => unreachable!("unsized return value"),
PassMode::Direct(_) => {
let place = fx.get_local_place(RETURN_PLACE); let place = fx.get_local_place(RETURN_PLACE);
let ret_val = place.to_cvalue(fx).load_scalar(fx); let ret_val = place.to_cvalue(fx).load_scalar(fx);
fx.bcx.ins().return_(&[ret_val]); fx.bcx.ins().return_(&[ret_val]);
} }
PassMode::ByValPair(_, _) => { PassMode::Pair(_, _) => {
let place = fx.get_local_place(RETURN_PLACE); let place = fx.get_local_place(RETURN_PLACE);
let (ret_val_a, ret_val_b) = place.to_cvalue(fx).load_scalar_pair(fx); let (ret_val_a, ret_val_b) = place.to_cvalue(fx).load_scalar_pair(fx);
fx.bcx.ins().return_(&[ret_val_a, ret_val_b]); fx.bcx.ins().return_(&[ret_val_a, ret_val_b]);
} }
PassMode::Cast(cast) => {
let place = fx.get_local_place(RETURN_PLACE);
let ret_val = place.to_cvalue(fx);
let ret_vals = super::pass_mode::to_casted_value(fx, ret_val, cast);
fx.bcx.ins().return_(&ret_vals);
}
} }
} }


@ -40,11 +40,14 @@ pub(crate) fn analyze(fx: &FunctionCx<'_, '_, impl Module>) -> IndexVec<Local, S
} }
         match &bb.terminator().kind {
-            TerminatorKind::Call { destination, .. } => {
+            TerminatorKind::Call {
+                destination,
+                func,
+                args,
+                ..
+            } => {
                 if let Some((dest_place, _dest_bb)) = destination {
-                    let dest_layout = fx
-                        .layout_of(fx.monomorphize(&dest_place.ty(&fx.mir.local_decls, fx.tcx).ty));
-                    if !crate::abi::can_return_to_ssa_var(fx.tcx, dest_layout) {
+                    if !crate::abi::can_return_to_ssa_var(fx, func, args) {
                         not_ssa(&mut flag_map, dest_place.local)
                     }
                 }


@ -2,6 +2,8 @@
use rustc_index::vec::IndexVec; use rustc_index::vec::IndexVec;
use rustc_middle::ty::adjustment::PointerCast; use rustc_middle::ty::adjustment::PointerCast;
use rustc_middle::ty::layout::FnAbiExt;
use rustc_target::abi::call::FnAbi;
use crate::prelude::*; use crate::prelude::*;
@ -19,7 +21,8 @@ pub(crate) fn codegen_fn<'tcx>(
let mir = tcx.instance_mir(instance.def); let mir = tcx.instance_mir(instance.def);
// Declare function // Declare function
let (name, sig) = get_function_name_and_sig(tcx, cx.module.isa().triple(), instance, false); let name = tcx.symbol_name(instance).name.to_string();
let sig = get_function_sig(tcx, cx.module.isa().triple(), instance);
let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap(); let func_id = cx.module.declare_function(&name, linkage, &sig).unwrap();
cx.cached_context.clear(); cx.cached_context.clear();
@ -50,6 +53,7 @@ pub(crate) fn codegen_fn<'tcx>(
instance, instance,
mir, mir,
fn_abi: Some(FnAbi::of_instance(&RevealAllLayoutCx(tcx), instance, &[])),
bcx, bcx,
block_map, block_map,
@ -117,6 +121,9 @@ pub(crate) fn codegen_fn<'tcx>(
context.compute_domtree(); context.compute_domtree();
context.eliminate_unreachable_code(cx.module.isa()).unwrap(); context.eliminate_unreachable_code(cx.module.isa()).unwrap();
context.dce(cx.module.isa()).unwrap(); context.dce(cx.module.isa()).unwrap();
// Some Cranelift optimizations expect the domtree to not yet be computed and as such don't
// invalidate it when it would change.
context.domtree.clear();
context.want_disasm = crate::pretty_clif::should_write_ir(tcx); context.want_disasm = crate::pretty_clif::should_write_ir(tcx);
@ -1053,7 +1060,11 @@ pub(crate) fn codegen_panic_inner<'tcx>(
     fx.lib_call(
         &*symbol_name,
-        vec![fx.pointer_type, fx.pointer_type, fx.pointer_type],
+        vec![
+            AbiParam::new(fx.pointer_type),
+            AbiParam::new(fx.pointer_type),
+            AbiParam::new(fx.pointer_type),
+        ],
         vec![],
         args,
     );


@ -6,7 +6,7 @@ extern crate rustc_interface;
extern crate rustc_session; extern crate rustc_session;
extern crate rustc_target; extern crate rustc_target;
use rustc_data_structures::profiling::print_time_passes_entry; use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
use rustc_interface::interface; use rustc_interface::interface;
use rustc_session::config::ErrorOutputType; use rustc_session::config::ErrorOutputType;
use rustc_session::early_error; use rustc_session::early_error;
@ -39,7 +39,8 @@ impl rustc_driver::Callbacks for CraneliftPassesCallbacks {
} }
fn main() { fn main() {
let start = std::time::Instant::now(); let start_time = std::time::Instant::now();
let start_rss = get_resident_set_size();
rustc_driver::init_rustc_env_logger(); rustc_driver::init_rustc_env_logger();
let mut callbacks = CraneliftPassesCallbacks::default(); let mut callbacks = CraneliftPassesCallbacks::default();
rustc_driver::install_ice_hook(); rustc_driver::install_ice_hook();
@ -61,7 +62,11 @@ fn main() {
}))); })));
run_compiler.run() run_compiler.run()
}); });
// The extra `\t` is necessary to align this label with the others.
print_time_passes_entry(callbacks.time_passes, "\ttotal", start.elapsed()); if callbacks.time_passes {
let end_rss = get_resident_set_size();
print_time_passes_entry("total", start_time.elapsed(), start_rss, end_rss);
}
std::process::exit(exit_code) std::process::exit(exit_code)
} }


@ -53,10 +53,7 @@ impl rustc_driver::Callbacks for CraneliftPassesCallbacks {
.unwrap() .unwrap()
.parent() .parent()
.unwrap() .unwrap()
.parent() .to_owned(),
.unwrap()
.join("build_sysroot")
.join("sysroot"),
); );
} }
} }


@ -1,5 +1,7 @@
//! Replaces 128-bit operators with lang item calls where necessary //! Replaces 128-bit operators with lang item calls where necessary
use cranelift_codegen::ir::ArgumentPurpose;
use crate::prelude::*; use crate::prelude::*;
pub(crate) fn maybe_codegen<'tcx>( pub(crate) fn maybe_codegen<'tcx>(
@ -24,41 +26,41 @@ pub(crate) fn maybe_codegen<'tcx>(
None None
} }
BinOp::Add | BinOp::Sub if !checked => None, BinOp::Add | BinOp::Sub if !checked => None,
BinOp::Add => { BinOp::Mul if !checked => {
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter()); let val_ty = if is_signed {
return Some(if is_signed { fx.tcx.types.i128
fx.easy_call("__rust_i128_addo", &[lhs, rhs], out_ty)
} else { } else {
fx.easy_call("__rust_u128_addo", &[lhs, rhs], out_ty) fx.tcx.types.u128
}); };
Some(fx.easy_call("__multi3", &[lhs, rhs], val_ty))
} }
BinOp::Sub => { BinOp::Add | BinOp::Sub | BinOp::Mul => {
assert!(checked);
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter()); let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
return Some(if is_signed { let out_place = CPlace::new_stack_slot(fx, fx.layout_of(out_ty));
fx.easy_call("__rust_i128_subo", &[lhs, rhs], out_ty) let param_types = vec![
} else { AbiParam::special(pointer_ty(fx.tcx), ArgumentPurpose::StructReturn),
fx.easy_call("__rust_u128_subo", &[lhs, rhs], out_ty) AbiParam::new(types::I128),
}); AbiParam::new(types::I128),
];
let args = [
out_place.to_ptr().get_addr(fx),
lhs.load_scalar(fx),
rhs.load_scalar(fx),
];
let name = match (bin_op, is_signed) {
(BinOp::Add, false) => "__rust_u128_addo",
(BinOp::Add, true) => "__rust_i128_addo",
(BinOp::Sub, false) => "__rust_u128_subo",
(BinOp::Sub, true) => "__rust_i128_subo",
(BinOp::Mul, false) => "__rust_u128_mulo",
(BinOp::Mul, true) => "__rust_i128_mulo",
_ => unreachable!(),
};
fx.lib_call(name, param_types, vec![], &args);
Some(out_place.to_cvalue(fx))
} }
BinOp::Offset => unreachable!("offset should only be used on pointers, not 128bit ints"), BinOp::Offset => unreachable!("offset should only be used on pointers, not 128bit ints"),
BinOp::Mul => {
let res = if checked {
let out_ty = fx.tcx.mk_tup([lhs.layout().ty, fx.tcx.types.bool].iter());
if is_signed {
fx.easy_call("__rust_i128_mulo", &[lhs, rhs], out_ty)
} else {
fx.easy_call("__rust_u128_mulo", &[lhs, rhs], out_ty)
}
} else {
let val_ty = if is_signed {
fx.tcx.types.i128
} else {
fx.tcx.types.u128
};
fx.easy_call("__multi3", &[lhs, rhs], val_ty)
};
Some(res)
}
BinOp::Div => { BinOp::Div => {
assert!(!checked); assert!(!checked);
if is_signed { if is_signed {
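The checked 128-bit arms above hand the work to the __rust_i128/u128 addo/subo/mulo helpers, which produce the wrapped result together with an overflow flag through the sret pointer. A minimal standalone sketch of that (value, overflowed) contract, using plain library methods rather than the compiler helpers:

// Standalone sketch (not compiler code) of the semantics the overflow helpers
// provide for the checked 128-bit operators lowered above.
fn u128_mulo(a: u128, b: u128) -> (u128, bool) {
    a.overflowing_mul(b)
}

fn main() {
    assert_eq!(u128_mulo(2, 3), (6, false));
    let (wrapped, overflowed) = u128_mulo(u128::MAX, 2);
    assert_eq!(wrapped, u128::MAX.wrapping_mul(2));
    assert!(overflowed);
    println!("ok");
}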


@ -1,4 +1,5 @@
use rustc_index::vec::IndexVec; use rustc_index::vec::IndexVec;
use rustc_target::abi::call::FnAbi;
use rustc_target::abi::{Integer, Primitive}; use rustc_target::abi::{Integer, Primitive};
use rustc_target::spec::{HasTargetSpec, Target}; use rustc_target::spec::{HasTargetSpec, Target};
@ -294,6 +295,7 @@ pub(crate) struct FunctionCx<'clif, 'tcx, M: Module> {
pub(crate) instance: Instance<'tcx>, pub(crate) instance: Instance<'tcx>,
pub(crate) mir: &'tcx Body<'tcx>, pub(crate) mir: &'tcx Body<'tcx>,
pub(crate) fn_abi: Option<FnAbi<'tcx, Ty<'tcx>>>,
pub(crate) bcx: FunctionBuilder<'clif>, pub(crate) bcx: FunctionBuilder<'clif>,
pub(crate) block_map: IndexVec<BasicBlock, Block>, pub(crate) block_map: IndexVec<BasicBlock, Block>,
@ -319,16 +321,7 @@ impl<'tcx, M: Module> LayoutOf for FunctionCx<'_, 'tcx, M> {
type TyAndLayout = TyAndLayout<'tcx>; type TyAndLayout = TyAndLayout<'tcx>;
fn layout_of(&self, ty: Ty<'tcx>) -> TyAndLayout<'tcx> { fn layout_of(&self, ty: Ty<'tcx>) -> TyAndLayout<'tcx> {
assert!(!ty.still_further_specializable()); RevealAllLayoutCx(self.tcx).layout_of(ty)
self.tcx
.layout_of(ParamEnv::reveal_all().and(&ty))
.unwrap_or_else(|e| {
if let layout::LayoutError::SizeOverflow(_) = e {
self.tcx.sess.fatal(&e.to_string())
} else {
bug!("failed to get layout for `{}`: {}", ty, e)
}
})
} }
} }
@ -442,3 +435,47 @@ impl<'tcx, M: Module> FunctionCx<'_, 'tcx, M> {
self.bcx.ins().global_value(self.pointer_type, local_msg_id) self.bcx.ins().global_value(self.pointer_type, local_msg_id)
} }
} }
pub(crate) struct RevealAllLayoutCx<'tcx>(pub(crate) TyCtxt<'tcx>);
impl<'tcx> LayoutOf for RevealAllLayoutCx<'tcx> {
type Ty = Ty<'tcx>;
type TyAndLayout = TyAndLayout<'tcx>;
fn layout_of(&self, ty: Ty<'tcx>) -> TyAndLayout<'tcx> {
assert!(!ty.still_further_specializable());
self.0
.layout_of(ParamEnv::reveal_all().and(&ty))
.unwrap_or_else(|e| {
if let layout::LayoutError::SizeOverflow(_) = e {
self.0.sess.fatal(&e.to_string())
} else {
bug!("failed to get layout for `{}`: {}", ty, e)
}
})
}
}
impl<'tcx> layout::HasTyCtxt<'tcx> for RevealAllLayoutCx<'tcx> {
fn tcx<'b>(&'b self) -> TyCtxt<'tcx> {
self.0
}
}
impl<'tcx> rustc_target::abi::HasDataLayout for RevealAllLayoutCx<'tcx> {
fn data_layout(&self) -> &rustc_target::abi::TargetDataLayout {
&self.0.data_layout
}
}
impl<'tcx> layout::HasParamEnv<'tcx> for RevealAllLayoutCx<'tcx> {
fn param_env(&self) -> ParamEnv<'tcx> {
ParamEnv::reveal_all()
}
}
impl<'tcx> HasTargetSpec for RevealAllLayoutCx<'tcx> {
fn target_spec(&self) -> &Target {
&self.0.sess.target
}
}


@ -281,9 +281,6 @@ pub(super) fn run_aot(
None None
}; };
rustc_incremental::assert_dep_graph(tcx);
rustc_incremental::save_dep_graph(tcx);
let metadata_module = if need_metadata_module { let metadata_module = if need_metadata_module {
let _timer = tcx.prof.generic_activity("codegen crate metadata"); let _timer = tcx.prof.generic_activity("codegen crate metadata");
let (metadata_cgu_name, tmp_file) = tcx.sess.time("write compressed metadata", || { let (metadata_cgu_name, tmp_file) = tcx.sess.time("write compressed metadata", || {
@ -322,10 +319,6 @@ pub(super) fn run_aot(
None None
}; };
if tcx.sess.opts.output_types.should_codegen() {
rustc_incremental::assert_module_sources::assert_module_sources(tcx);
}
Box::new(( Box::new((
CodegenResults { CodegenResults {
crate_name: tcx.crate_name(LOCAL_CRATE), crate_name: tcx.crate_name(LOCAL_CRATE),


@ -156,12 +156,8 @@ extern "C" fn __clif_jit_fn(instance_ptr: *const Instance<'static>) -> *const u8
let jit_module = jit_module.as_mut().unwrap(); let jit_module = jit_module.as_mut().unwrap();
let mut cx = crate::CodegenCx::new(tcx, jit_module, false, false); let mut cx = crate::CodegenCx::new(tcx, jit_module, false, false);
let (name, sig) = crate::abi::get_function_name_and_sig( let name = tcx.symbol_name(instance).name.to_string();
tcx, let sig = crate::abi::get_function_sig(tcx, cx.module.isa().triple(), instance);
cx.module.isa().triple(),
instance,
true,
);
let func_id = cx let func_id = cx
.module .module
.declare_function(&name, Linkage::Export, &sig) .declare_function(&name, Linkage::Export, &sig)
@ -246,8 +242,8 @@ pub(super) fn codegen_shim<'tcx>(cx: &mut CodegenCx<'tcx, impl Module>, inst: In
let pointer_type = cx.module.target_config().pointer_type(); let pointer_type = cx.module.target_config().pointer_type();
let (name, sig) = let name = tcx.symbol_name(inst).name.to_string();
crate::abi::get_function_name_and_sig(tcx, cx.module.isa().triple(), inst, true); let sig = crate::abi::get_function_sig(tcx, cx.module.isa().triple(), inst);
let func_id = cx let func_id = cx
.module .module
.declare_function(&name, Linkage::Export, &sig) .declare_function(&name, Linkage::Export, &sig)


@ -50,12 +50,9 @@ fn predefine_mono_items<'tcx>(
for &(mono_item, (linkage, visibility)) in mono_items { for &(mono_item, (linkage, visibility)) in mono_items {
match mono_item { match mono_item {
MonoItem::Fn(instance) => { MonoItem::Fn(instance) => {
let (name, sig) = get_function_name_and_sig( let name = cx.tcx.symbol_name(instance).name.to_string();
cx.tcx, let _inst_guard = crate::PrintOnPanic(|| format!("{:?} {}", instance, name));
cx.module.isa().triple(), let sig = get_function_sig(cx.tcx, cx.module.isa().triple(), instance);
instance,
false,
);
let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility); let linkage = crate::linkage::get_clif_linkage(mono_item, linkage, visibility);
cx.module.declare_function(&name, linkage, &sig).unwrap(); cx.module.declare_function(&name, linkage, &sig).unwrap();
} }


@ -27,7 +27,6 @@ extern crate rustc_incremental;
extern crate rustc_index; extern crate rustc_index;
extern crate rustc_session; extern crate rustc_session;
extern crate rustc_span; extern crate rustc_span;
extern crate rustc_symbol_mangling;
extern crate rustc_target; extern crate rustc_target;
// This prevents duplicating functions and statics that are already part of the host rustc process. // This prevents duplicating functions and statics that are already part of the host rustc process.
@ -90,7 +89,8 @@ mod prelude {
pub(crate) use rustc_middle::mir::{self, *}; pub(crate) use rustc_middle::mir::{self, *};
pub(crate) use rustc_middle::ty::layout::{self, TyAndLayout}; pub(crate) use rustc_middle::ty::layout::{self, TyAndLayout};
pub(crate) use rustc_middle::ty::{ pub(crate) use rustc_middle::ty::{
self, FloatTy, FnSig, Instance, InstanceDef, IntTy, ParamEnv, Ty, TyCtxt, TypeAndMut, TypeFoldable, UintTy, self, FloatTy, Instance, InstanceDef, IntTy, ParamEnv, Ty, TyCtxt, TypeAndMut,
TypeFoldable, UintTy,
}; };
pub(crate) use rustc_target::abi::{Abi, LayoutOf, Scalar, Size, VariantIdx}; pub(crate) use rustc_target::abi::{Abi, LayoutOf, Scalar, Size, VariantIdx};
@ -256,8 +256,6 @@ impl CodegenBackend for CraneliftCodegenBackend {
}; };
let res = driver::codegen_crate(tcx, metadata, need_metadata_module, config); let res = driver::codegen_crate(tcx, metadata, need_metadata_module, config);
rustc_symbol_mangling::test::report_symbol_names(tcx);
res res
} }
@ -279,18 +277,14 @@ impl CodegenBackend for CraneliftCodegenBackend {
) -> Result<(), ErrorReported> { ) -> Result<(), ErrorReported> {
use rustc_codegen_ssa::back::link::link_binary; use rustc_codegen_ssa::back::link::link_binary;
let _timer = sess.prof.generic_activity("link_crate"); let target_cpu = crate::target_triple(sess).to_string();
link_binary::<crate::archive::ArArchiveBuilder<'_>>(
sess.time("linking", || { sess,
let target_cpu = crate::target_triple(sess).to_string(); &codegen_results,
link_binary::<crate::archive::ArArchiveBuilder<'_>>( outputs,
sess, &codegen_results.crate_name.as_str(),
&codegen_results, &target_cpu,
outputs, );
&codegen_results.crate_name.as_str(),
&target_cpu,
);
});
Ok(()) Ok(())
} }
@ -345,7 +339,12 @@ fn build_isa(sess: &Session) -> Box<dyn isa::TargetIsa + 'static> {
     let flags = settings::Flags::new(flags_builder);

-    let mut isa_builder = cranelift_codegen::isa::lookup(target_triple).unwrap();
+    let variant = if cfg!(feature = "oldbe") {
+        cranelift_codegen::isa::BackendVariant::Legacy
+    } else {
+        cranelift_codegen::isa::BackendVariant::MachInst
+    };
+    let mut isa_builder = cranelift_codegen::isa::lookup_variant(target_triple, variant).unwrap();
     // Don't use "haswell", as it implies `has_lzcnt`. macOS CI is still at Ivy Bridge EP, so `lzcnt`
     // is interpreted as `bsr`.
     isa_builder.enable("nehalem").unwrap();


@ -69,8 +69,8 @@ pub(crate) fn maybe_create_entry_wrapper(
     let instance = Instance::mono(tcx, rust_main_def_id).polymorphize(tcx);

-    let (main_name, main_sig) =
-        get_function_name_and_sig(tcx, m.isa().triple(), instance, false);
+    let main_name = tcx.symbol_name(instance).name.to_string();
+    let main_sig = get_function_sig(tcx, m.isa().triple(), instance);
     let main_func_id = m
         .declare_function(&main_name, Linkage::Import, &main_sig)
         .unwrap();


@ -280,7 +280,6 @@ pub(crate) fn codegen_checked_int_binop<'tcx>(
             (val, fx.bcx.ins().bor(has_underflow, has_overflow))
         }
         types::I64 => {
-            //let val = fx.easy_call("__mulodi4", &[lhs, rhs, overflow_ptr], types::I64);
             let val = fx.bcx.ins().imul(lhs, rhs);
             let has_overflow = if !signed {
                 let val_hi = fx.bcx.ins().umulhi(lhs, rhs);
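The unsigned 64-bit overflow check above relies on umulhi: the product overflows exactly when the upper 64 bits of the full 128-bit result are non-zero. A standalone sketch of that check (umulhi and mul_overflows are hypothetical helper names, not cg_clif code):

// Widen to 128 bits, take the high half; a non-zero high half means the
// 64-bit multiplication overflowed.
fn umulhi(a: u64, b: u64) -> u64 {
    ((a as u128 * b as u128) >> 64) as u64
}

fn mul_overflows(a: u64, b: u64) -> bool {
    umulhi(a, b) != 0
}

fn main() {
    assert!(!mul_overflows(1 << 32, 1 << 31));
    assert!(mul_overflows(1 << 32, 1 << 32));
    assert_eq!(mul_overflows(u64::MAX, 2), u64::MAX.checked_mul(2).is_none());
    println!("ok");
}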


@ -61,7 +61,9 @@ use cranelift_codegen::{
write::{FuncWriter, PlainWriter}, write::{FuncWriter, PlainWriter},
}; };
use rustc_middle::ty::layout::FnAbiExt;
use rustc_session::config::OutputType; use rustc_session::config::OutputType;
use rustc_target::abi::call::FnAbi;
use crate::prelude::*; use crate::prelude::*;
@ -78,11 +80,8 @@ impl CommentWriter {
format!("symbol {}", tcx.symbol_name(instance).name), format!("symbol {}", tcx.symbol_name(instance).name),
format!("instance {:?}", instance), format!("instance {:?}", instance),
format!( format!(
"sig {:?}", "abi {:?}",
tcx.normalize_erasing_late_bound_regions( FnAbi::of_instance(&RevealAllLayoutCx(tcx), instance, &[])
ParamEnv::reveal_all(),
crate::abi::fn_sig_for_fn_abi(tcx, instance)
)
), ),
String::new(), String::new(),
] ]


@ -334,7 +334,9 @@ impl<'tcx> CPlace<'tcx> {
let stack_slot = fx.bcx.create_stack_slot(StackSlotData { let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
kind: StackSlotKind::ExplicitSlot, kind: StackSlotKind::ExplicitSlot,
size: u32::try_from(layout.size.bytes()).unwrap(), // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
size: (u32::try_from(layout.size.bytes()).unwrap() + 15) / 16 * 16,
offset: None, offset: None,
}); });
CPlace { CPlace {
@ -450,64 +452,6 @@ impl<'tcx> CPlace<'tcx> {
fx: &mut FunctionCx<'_, 'tcx, impl Module>, fx: &mut FunctionCx<'_, 'tcx, impl Module>,
from: CValue<'tcx>, from: CValue<'tcx>,
) { ) {
fn assert_assignable<'tcx>(
fx: &FunctionCx<'_, 'tcx, impl Module>,
from_ty: Ty<'tcx>,
to_ty: Ty<'tcx>,
) {
match (from_ty.kind(), to_ty.kind()) {
(ty::Ref(_, a, _), ty::Ref(_, b, _))
| (
ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }),
ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }),
) => {
assert_assignable(fx, a, b);
}
(ty::FnPtr(_), ty::FnPtr(_)) => {
let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
from_ty.fn_sig(fx.tcx),
);
let to_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
to_ty.fn_sig(fx.tcx),
);
assert_eq!(
from_sig, to_sig,
"Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
from_sig, to_sig, fx,
);
// fn(&T) -> for<'l> fn(&'l T) is allowed
}
(&ty::Dynamic(from_traits, _), &ty::Dynamic(to_traits, _)) => {
for (from, to) in from_traits.iter().zip(to_traits) {
let from = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from);
let to = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to);
assert_eq!(
from, to,
"Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
from_traits, to_traits, fx,
);
}
// dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
}
_ => {
assert_eq!(
from_ty,
to_ty,
"Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
from_ty,
to_ty,
fx,
);
}
}
}
assert_assignable(fx, from.layout().ty, self.layout().ty); assert_assignable(fx, from.layout().ty, self.layout().ty);
self.write_cvalue_maybe_transmute(fx, from, "write_cvalue"); self.write_cvalue_maybe_transmute(fx, from, "write_cvalue");
@ -556,7 +500,9 @@ impl<'tcx> CPlace<'tcx> {
// FIXME do something more efficient for transmutes between vectors and integers. // FIXME do something more efficient for transmutes between vectors and integers.
let stack_slot = fx.bcx.create_stack_slot(StackSlotData { let stack_slot = fx.bcx.create_stack_slot(StackSlotData {
kind: StackSlotKind::ExplicitSlot, kind: StackSlotKind::ExplicitSlot,
size: src_ty.bytes(), // FIXME Don't force the size to a multiple of 16 bytes once Cranelift gets a way to
// specify stack slot alignment.
size: (src_ty.bytes() + 15) / 16 * 16,
offset: None, offset: None,
}); });
let ptr = Pointer::stack_slot(stack_slot); let ptr = Pointer::stack_slot(stack_slot);
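The transmute path above stores the value with its source type into an over-sized stack slot and reloads it with the destination type. A rough standalone illustration of the same store-then-reload idea, assuming a u32-to-f32 reinterpretation (not cg_clif code):

use std::convert::TryInto;

// Write the source bytes into a buffer, then read the same bytes back as the
// destination type; the over-sized buffer mirrors the 16-byte-rounded slot.
fn transmute_u32_to_f32_via_buffer(x: u32) -> f32 {
    let mut buf = [0u8; 16];
    buf[..4].copy_from_slice(&x.to_ne_bytes());
    f32::from_ne_bytes(buf[..4].try_into().unwrap())
}

fn main() {
    assert_eq!(transmute_u32_to_f32_via_buffer(0x3f80_0000), 1.0);
    println!("ok");
}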
@ -794,3 +740,62 @@ impl<'tcx> CPlace<'tcx> {
} }
} }
} }
#[track_caller]
pub(crate) fn assert_assignable<'tcx>(
fx: &FunctionCx<'_, 'tcx, impl Module>,
from_ty: Ty<'tcx>,
to_ty: Ty<'tcx>,
) {
match (from_ty.kind(), to_ty.kind()) {
(ty::Ref(_, a, _), ty::Ref(_, b, _))
| (
ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }),
ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }),
) => {
assert_assignable(fx, a, b);
}
(ty::Ref(_, a, _), ty::RawPtr(TypeAndMut { ty: b, mutbl: _ }))
| (ty::RawPtr(TypeAndMut { ty: a, mutbl: _ }), ty::Ref(_, b, _)) => {
assert_assignable(fx, a, b);
}
(ty::FnPtr(_), ty::FnPtr(_)) => {
let from_sig = fx.tcx.normalize_erasing_late_bound_regions(
ParamEnv::reveal_all(),
from_ty.fn_sig(fx.tcx),
);
let to_sig = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to_ty.fn_sig(fx.tcx));
assert_eq!(
from_sig, to_sig,
"Can't write fn ptr with incompatible sig {:?} to place with sig {:?}\n\n{:#?}",
from_sig, to_sig, fx,
);
// fn(&T) -> for<'l> fn(&'l T) is allowed
}
(&ty::Dynamic(from_traits, _), &ty::Dynamic(to_traits, _)) => {
for (from, to) in from_traits.iter().zip(to_traits) {
let from = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), from);
let to = fx
.tcx
.normalize_erasing_late_bound_regions(ParamEnv::reveal_all(), to);
assert_eq!(
from, to,
"Can't write trait object of incompatible traits {:?} to place with traits {:?}\n\n{:#?}",
from_traits, to_traits, fx,
);
}
// dyn for<'r> Trait<'r> -> dyn Trait<'_> is allowed
}
_ => {
assert_eq!(
from_ty, to_ty,
"Can't write value with incompatible type {:?} to place with type {:?}\n\n{:#?}",
from_ty, to_ty, fx,
);
}
}
}


@ -1,9 +1,7 @@
 #!/bin/bash
 set -e

-export RUSTFLAGS="-Zrun_dsymutil=no"
-./build.sh --without-sysroot "$@"
+./build.sh --sysroot none "$@"

 rm -r target/out || true


@ -12,7 +12,7 @@ use crate::{CachedModuleCodegen, CrateInfo, MemFlags, ModuleCodegen, ModuleKind}
use rustc_attr as attr; use rustc_attr as attr;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::profiling::print_time_passes_entry; use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
use rustc_data_structures::sync::{par_iter, ParallelIterator}; use rustc_data_structures::sync::{par_iter, ParallelIterator};
use rustc_hir as hir; use rustc_hir as hir;
use rustc_hir::def_id::{LocalDefId, LOCAL_CRATE}; use rustc_hir::def_id::{LocalDefId, LOCAL_CRATE};
@ -595,6 +595,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
let mut cgu_reuse = Vec::new(); let mut cgu_reuse = Vec::new();
let mut pre_compiled_cgus: Option<FxHashMap<usize, _>> = None; let mut pre_compiled_cgus: Option<FxHashMap<usize, _>> = None;
let mut total_codegen_time = Duration::new(0, 0); let mut total_codegen_time = Duration::new(0, 0);
let start_rss = tcx.sess.time_passes().then(|| get_resident_set_size());
for (i, cgu) in codegen_units.iter().enumerate() { for (i, cgu) in codegen_units.iter().enumerate() {
ongoing_codegen.wait_for_signal_to_codegen_item(); ongoing_codegen.wait_for_signal_to_codegen_item();
@ -669,7 +670,16 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
// Since the main thread is sometimes blocked during codegen, we keep track // Since the main thread is sometimes blocked during codegen, we keep track
// -Ztime-passes output manually. // -Ztime-passes output manually.
print_time_passes_entry(tcx.sess.time_passes(), "codegen_to_LLVM_IR", total_codegen_time); if tcx.sess.time_passes() {
let end_rss = get_resident_set_size();
print_time_passes_entry(
"codegen_to_LLVM_IR",
total_codegen_time,
start_rss.unwrap(),
end_rss,
);
}
ongoing_codegen.check_for_errors(tcx.sess); ongoing_codegen.check_for_errors(tcx.sess);


@ -555,13 +555,16 @@ impl<'a> TimingGuard<'a> {
#[must_use] #[must_use]
pub struct VerboseTimingGuard<'a> { pub struct VerboseTimingGuard<'a> {
start_and_message: Option<(Instant, String)>, start_and_message: Option<(Instant, Option<usize>, String)>,
_guard: TimingGuard<'a>, _guard: TimingGuard<'a>,
} }
impl<'a> VerboseTimingGuard<'a> { impl<'a> VerboseTimingGuard<'a> {
pub fn start(message: Option<String>, _guard: TimingGuard<'a>) -> Self { pub fn start(message: Option<String>, _guard: TimingGuard<'a>) -> Self {
VerboseTimingGuard { _guard, start_and_message: message.map(|msg| (Instant::now(), msg)) } VerboseTimingGuard {
_guard,
start_and_message: message.map(|msg| (Instant::now(), get_resident_set_size(), msg)),
}
} }
#[inline(always)] #[inline(always)]
@ -573,25 +576,42 @@ impl<'a> VerboseTimingGuard<'a> {
impl Drop for VerboseTimingGuard<'_> { impl Drop for VerboseTimingGuard<'_> {
fn drop(&mut self) { fn drop(&mut self) {
if let Some((start, ref message)) = self.start_and_message { if let Some((start_time, start_rss, ref message)) = self.start_and_message {
print_time_passes_entry(true, &message[..], start.elapsed()); let end_rss = get_resident_set_size();
print_time_passes_entry(&message[..], start_time.elapsed(), start_rss, end_rss);
} }
} }
} }
-pub fn print_time_passes_entry(do_it: bool, what: &str, dur: Duration) {
-    if !do_it {
-        return;
-    }
+pub fn print_time_passes_entry(
+    what: &str,
+    dur: Duration,
+    start_rss: Option<usize>,
+    end_rss: Option<usize>,
+) {
+    let rss_to_mb = |rss| (rss as f64 / 1_000_000.0).round() as usize;

-    let mem_string = match get_resident() {
-        Some(n) => {
-            let mb = n as f64 / 1_000_000.0;
-            format!("; rss: {}MB", mb.round() as usize)
+    let mem_string = match (start_rss, end_rss) {
+        (Some(start_rss), Some(end_rss)) => {
+            // It's tempting to add the change in RSS from start to end, but it's somewhat confusing
+            // and misleading when looking at time-passes output. Consider two adjacent entries:
+            //
+            //   time: 10.000; rss start: 1000MB, end: 1000MB, change: 0MB  pass1
+            //   time:  5.000; rss start: 2000MB, end: 2000MB, change: 0MB  pass2
+            //
+            // If you're looking for jumps in RSS based on the change column, you miss the fact
+            // that a 1GB jump happened between pass1 and pass2 (supposing pass1 and pass2 actually
+            // occur sequentially and pass1 isn't just nested within pass2). It's easy to imagine
+            // someone missing this or being confused by the fact that the change is zero.
+            format!("; rss: {:>5}MB -> {:>5}MB", rss_to_mb(start_rss), rss_to_mb(end_rss))
         }
-        None => String::new(),
+        (Some(start_rss), None) => format!("; rss start: {:>5}MB", rss_to_mb(start_rss)),
+        (None, Some(end_rss)) => format!("; rss end: {:5>}MB", rss_to_mb(end_rss)),
+        (None, None) => String::new(),
     };
-    println!("time: {}{}\t{}", duration_to_secs_str(dur), mem_string, what);
+    println!("time: {:>7}{}\t{}", duration_to_secs_str(dur), mem_string, what);
 }
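A standalone sketch of the resulting -Ztime-passes line format (format_entry is a hypothetical name; the real function prints directly instead of returning a String):

// Elapsed time, optional RSS at the start and end of the pass, then the pass
// name, mirroring the formatting used above.
fn format_entry(what: &str, secs: f64, start_rss: Option<usize>, end_rss: Option<usize>) -> String {
    let rss_to_mb = |rss| (rss as f64 / 1_000_000.0).round() as usize;
    let mem = match (start_rss, end_rss) {
        (Some(s), Some(e)) => format!("; rss: {:>5}MB -> {:>5}MB", rss_to_mb(s), rss_to_mb(e)),
        (Some(s), None) => format!("; rss start: {:>5}MB", rss_to_mb(s)),
        (None, Some(e)) => format!("; rss end: {:>5}MB", rss_to_mb(e)),
        (None, None) => String::new(),
    };
    format!("time: {:>7}{}\t{}", format!("{:.3}", secs), mem, what)
}

fn main() {
    println!("{}", format_entry("codegen_to_LLVM_IR", 1.234, Some(500_000_000), Some(750_000_000)));
}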
// Hack up our own formatting for the duration to make it easier for scripts // Hack up our own formatting for the duration to make it easier for scripts
@ -603,7 +623,7 @@ pub fn duration_to_secs_str(dur: std::time::Duration) -> String {
// Memory reporting // Memory reporting
cfg_if! { cfg_if! {
if #[cfg(windows)] { if #[cfg(windows)] {
fn get_resident() -> Option<usize> { pub fn get_resident_set_size() -> Option<usize> {
use std::mem::{self, MaybeUninit}; use std::mem::{self, MaybeUninit};
use winapi::shared::minwindef::DWORD; use winapi::shared::minwindef::DWORD;
use winapi::um::processthreadsapi::GetCurrentProcess; use winapi::um::processthreadsapi::GetCurrentProcess;
@ -621,7 +641,7 @@ cfg_if! {
} }
} }
} else if #[cfg(unix)] { } else if #[cfg(unix)] {
fn get_resident() -> Option<usize> { pub fn get_resident_set_size() -> Option<usize> {
let field = 1; let field = 1;
let contents = fs::read("/proc/self/statm").ok()?; let contents = fs::read("/proc/self/statm").ok()?;
let contents = String::from_utf8(contents).ok()?; let contents = String::from_utf8(contents).ok()?;
@ -630,7 +650,7 @@ cfg_if! {
Some(npages * 4096) Some(npages * 4096)
} }
} else { } else {
fn get_resident() -> Option<usize> { pub fn get_resident_set_size() -> Option<usize> {
None None
} }
} }
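The unix branch reads the resident page count from /proc/self/statm and assumes 4096-byte pages. A standalone, Linux-only sketch of that lookup (resident_set_size is a hypothetical name, not the rustc_data_structures function):

use std::fs;

// Second whitespace-separated field of /proc/self/statm is the number of
// resident pages; multiply by an assumed 4 KiB page size. Returns None if the
// file is missing or malformed, e.g. on non-Linux platforms.
fn resident_set_size() -> Option<usize> {
    let contents = fs::read_to_string("/proc/self/statm").ok()?;
    let pages: usize = contents.split_whitespace().nth(1)?.parse().ok()?;
    Some(pages * 4096)
}

fn main() {
    match resident_set_size() {
        Some(bytes) => println!("rss: {} MB", bytes / 1_000_000),
        None => println!("rss unavailable on this platform"),
    }
}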


@ -16,7 +16,7 @@ pub extern crate rustc_plugin_impl as plugin;
use rustc_ast as ast; use rustc_ast as ast;
use rustc_codegen_ssa::{traits::CodegenBackend, CodegenResults}; use rustc_codegen_ssa::{traits::CodegenBackend, CodegenResults};
use rustc_data_structures::profiling::print_time_passes_entry; use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
use rustc_data_structures::sync::SeqCst; use rustc_data_structures::sync::SeqCst;
use rustc_errors::registry::{InvalidErrorCode, Registry}; use rustc_errors::registry::{InvalidErrorCode, Registry};
use rustc_errors::{ErrorReported, PResult}; use rustc_errors::{ErrorReported, PResult};
@ -1312,7 +1312,8 @@ pub fn init_env_logger(env: &str) {
} }
pub fn main() -> ! { pub fn main() -> ! {
let start = Instant::now(); let start_time = Instant::now();
let start_rss = get_resident_set_size();
init_rustc_env_logger(); init_rustc_env_logger();
let mut callbacks = TimePassesCallbacks::default(); let mut callbacks = TimePassesCallbacks::default();
install_ice_hook(); install_ice_hook();
@ -1330,7 +1331,11 @@ pub fn main() -> ! {
.collect::<Vec<_>>(); .collect::<Vec<_>>();
RunCompiler::new(&args, &mut callbacks).run() RunCompiler::new(&args, &mut callbacks).run()
}); });
// The extra `\t` is necessary to align this label with the others.
print_time_passes_entry(callbacks.time_passes, "\ttotal", start.elapsed()); if callbacks.time_passes {
let end_rss = get_resident_set_size();
print_time_passes_entry("total", start_time.elapsed(), start_rss, end_rss);
}
process::exit(exit_code) process::exit(exit_code)
} }


@ -14,7 +14,7 @@ use crate::infer::canonical::{
}; };
use crate::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate}; use crate::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate};
use crate::infer::region_constraints::{Constraint, RegionConstraintData}; use crate::infer::region_constraints::{Constraint, RegionConstraintData};
use crate::infer::{InferCtxt, InferOk, InferResult, NLLRegionVariableOrigin}; use crate::infer::{InferCtxt, InferOk, InferResult, NllRegionVariableOrigin};
use crate::traits::query::{Fallible, NoSolution}; use crate::traits::query::{Fallible, NoSolution};
use crate::traits::TraitEngine; use crate::traits::TraitEngine;
use crate::traits::{Obligation, ObligationCause, PredicateObligation}; use crate::traits::{Obligation, ObligationCause, PredicateObligation};
@ -644,7 +644,7 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for QueryTypeRelatingDelegate<'_, 'tcx> {
} }
fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> { fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> {
let origin = NLLRegionVariableOrigin::Existential { from_forall }; let origin = NllRegionVariableOrigin::Existential { from_forall };
self.infcx.next_nll_region_var(origin) self.infcx.next_nll_region_var(origin)
} }
@ -654,7 +654,7 @@ impl<'tcx> TypeRelatingDelegate<'tcx> for QueryTypeRelatingDelegate<'_, 'tcx> {
fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> { fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> {
self.infcx.next_nll_region_var_in_universe( self.infcx.next_nll_region_var_in_universe(
NLLRegionVariableOrigin::Existential { from_forall: false }, NllRegionVariableOrigin::Existential { from_forall: false },
universe, universe,
) )
} }


@ -1661,6 +1661,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
debug!("exp_found {:?} terr {:?}", exp_found, terr); debug!("exp_found {:?} terr {:?}", exp_found, terr);
if let Some(exp_found) = exp_found { if let Some(exp_found) = exp_found {
self.suggest_as_ref_where_appropriate(span, &exp_found, diag); self.suggest_as_ref_where_appropriate(span, &exp_found, diag);
self.suggest_accessing_field_where_appropriate(cause, &exp_found, diag);
self.suggest_await_on_expect_found(cause, span, &exp_found, diag); self.suggest_await_on_expect_found(cause, span, &exp_found, diag);
} }
@ -1819,6 +1820,53 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
} }
} }
fn suggest_accessing_field_where_appropriate(
&self,
cause: &ObligationCause<'tcx>,
exp_found: &ty::error::ExpectedFound<Ty<'tcx>>,
diag: &mut DiagnosticBuilder<'tcx>,
) {
debug!(
"suggest_accessing_field_where_appropriate(cause={:?}, exp_found={:?})",
cause, exp_found
);
if let ty::Adt(expected_def, expected_substs) = exp_found.expected.kind() {
if expected_def.is_enum() {
return;
}
if let Some((name, ty)) = expected_def
.non_enum_variant()
.fields
.iter()
.filter(|field| field.vis.is_accessible_from(field.did, self.tcx))
.map(|field| (field.ident.name, field.ty(self.tcx, expected_substs)))
.find(|(_, ty)| ty::TyS::same_type(ty, exp_found.found))
{
if let ObligationCauseCode::Pattern { span: Some(span), .. } = cause.code {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
let suggestion = if expected_def.is_struct() {
format!("{}.{}", snippet, name)
} else if expected_def.is_union() {
format!("unsafe {{ {}.{} }}", snippet, name)
} else {
return;
};
diag.span_suggestion(
span,
&format!(
"you might have meant to use field `{}` of type `{}`",
name, ty
),
suggestion,
Applicability::MaybeIncorrect,
);
}
}
}
}
}
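The suggestion string built above depends only on the ADT kind: plain field access for structs, an unsafe block for unions, and nothing for enums. A standalone sketch of that choice (field_access_suggestion and AdtKind are hypothetical stand-ins for the rustc types involved):

// Mirror of the suggestion formatting: structs get `expr.field`, unions get
// `unsafe { expr.field }`, enums are skipped because field access would not
// be a valid fix there.
enum AdtKind { Struct, Union, Enum }

fn field_access_suggestion(snippet: &str, field: &str, kind: AdtKind) -> Option<String> {
    match kind {
        AdtKind::Struct => Some(format!("{}.{}", snippet, field)),
        AdtKind::Union => Some(format!("unsafe {{ {}.{} }}", snippet, field)),
        AdtKind::Enum => None,
    }
}

fn main() {
    assert_eq!(
        field_access_suggestion("my_struct", "x", AdtKind::Struct).as_deref(),
        Some("my_struct.x")
    );
    assert_eq!(
        field_access_suggestion("my_union", "x", AdtKind::Union).as_deref(),
        Some("unsafe { my_union.x }")
    );
    println!("ok");
}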
/// When encountering a case where `.as_ref()` on a `Result` or `Option` would be appropriate, /// When encountering a case where `.as_ref()` on a `Result` or `Option` would be appropriate,
/// suggests it. /// suggests it.
fn suggest_as_ref_where_appropriate( fn suggest_as_ref_where_appropriate(
@ -2342,7 +2390,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
let var_name = self.tcx.hir().name(upvar_id.var_path.hir_id); let var_name = self.tcx.hir().name(upvar_id.var_path.hir_id);
format!(" for capture of `{}` by closure", var_name) format!(" for capture of `{}` by closure", var_name)
} }
infer::NLL(..) => bug!("NLL variable found in lexical phase"), infer::Nll(..) => bug!("NLL variable found in lexical phase"),
}; };
struct_span_err!( struct_span_err!(

View File

@ -458,11 +458,11 @@ pub enum RegionVariableOrigin {
/// This origin is used for the inference variables that we create /// This origin is used for the inference variables that we create
/// during NLL region processing. /// during NLL region processing.
NLL(NLLRegionVariableOrigin), Nll(NllRegionVariableOrigin),
} }
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
pub enum NLLRegionVariableOrigin { pub enum NllRegionVariableOrigin {
/// During NLL region processing, we create variables for free /// During NLL region processing, we create variables for free
/// regions that we encounter in the function signature and /// regions that we encounter in the function signature and
/// elsewhere. This origin indicates we've got one of those. /// elsewhere. This origin indicates we've got one of those.
@ -1078,17 +1078,17 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
} }
/// Just a convenient wrapper of `next_region_var` for using during NLL. /// Just a convenient wrapper of `next_region_var` for using during NLL.
pub fn next_nll_region_var(&self, origin: NLLRegionVariableOrigin) -> ty::Region<'tcx> { pub fn next_nll_region_var(&self, origin: NllRegionVariableOrigin) -> ty::Region<'tcx> {
self.next_region_var(RegionVariableOrigin::NLL(origin)) self.next_region_var(RegionVariableOrigin::Nll(origin))
} }
/// Just a convenient wrapper of `next_region_var` for using during NLL. /// Just a convenient wrapper of `next_region_var` for using during NLL.
pub fn next_nll_region_var_in_universe( pub fn next_nll_region_var_in_universe(
&self, &self,
origin: NLLRegionVariableOrigin, origin: NllRegionVariableOrigin,
universe: ty::UniverseIndex, universe: ty::UniverseIndex,
) -> ty::Region<'tcx> { ) -> ty::Region<'tcx> {
self.next_region_var_in_universe(RegionVariableOrigin::NLL(origin), universe) self.next_region_var_in_universe(RegionVariableOrigin::Nll(origin), universe)
} }
pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> { pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> {
@ -1770,7 +1770,7 @@ impl RegionVariableOrigin {
| LateBoundRegion(a, ..) | LateBoundRegion(a, ..)
| UpvarRegion(_, a) => a, | UpvarRegion(_, a) => a,
BoundRegionInCoherence(_) => rustc_span::DUMMY_SP, BoundRegionInCoherence(_) => rustc_span::DUMMY_SP,
NLL(..) => bug!("NLL variable used with `span`"), Nll(..) => bug!("NLL variable used with `span`"),
} }
} }
} }

View File

@ -56,8 +56,19 @@ declare_lint! {
declare_lint_pass!(NonCamelCaseTypes => [NON_CAMEL_CASE_TYPES]); declare_lint_pass!(NonCamelCaseTypes => [NON_CAMEL_CASE_TYPES]);
/// Some unicode characters *have* case, are considered upper case or lower case, but they *can't*
/// be upper cased or lower cased. For the purposes of the lint suggestion, we care about being able
/// to change the char's case.
fn char_has_case(c: char) -> bool { fn char_has_case(c: char) -> bool {
c.is_lowercase() || c.is_uppercase() let mut l = c.to_lowercase();
let mut u = c.to_uppercase();
while let Some(l) = l.next() {
match u.next() {
Some(u) if l != u => return true,
_ => {}
}
}
u.next().is_some()
} }
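Extracted as a standalone sketch so the behavior change is easy to try; the helper body is the same logic as above, while the assertions in `main` are this editor's additions covering only the uncontroversial cases (an ASCII letter has case, an underscore and a digit do not):

```rust
// A char "has case" only if changing its case actually produces different
// characters, not merely if Unicode classifies it as upper or lower case.
fn char_has_case(c: char) -> bool {
    let mut l = c.to_lowercase();
    let mut u = c.to_uppercase();
    while let Some(l) = l.next() {
        match u.next() {
            Some(u) if l != u => return true,
            _ => {}
        }
    }
    u.next().is_some()
}

fn main() {
    assert!(char_has_case('a')); // ordinary cased letter
    assert!(!char_has_case('_')); // no case at all
    assert!(!char_has_case('1')); // digits have no case either
}
```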
fn is_camel_case(name: &str) -> bool { fn is_camel_case(name: &str) -> bool {
@ -138,6 +149,8 @@ impl NonCamelCaseTypes {
to_camel_case(name), to_camel_case(name),
Applicability::MaybeIncorrect, Applicability::MaybeIncorrect,
); );
} else {
err.span_label(ident.span, "should have an UpperCamelCase name");
} }
err.emit(); err.emit();
@ -299,6 +312,8 @@ impl NonSnakeCase {
} else { } else {
err.help(&format!("convert the identifier to snake case: `{}`", sc)); err.help(&format!("convert the identifier to snake case: `{}`", sc));
} }
} else {
err.span_label(ident.span, "should have a snake_case name");
} }
err.emit(); err.emit();
@ -477,6 +492,8 @@ impl NonUpperCaseGlobals {
uc, uc,
Applicability::MaybeIncorrect, Applicability::MaybeIncorrect,
); );
} else {
err.span_label(ident.span, "should have an UPPER_CASE name");
} }
err.emit(); err.emit();

View File

@ -5,7 +5,7 @@ use std::collections::VecDeque;
use rustc_data_structures::fx::FxHashSet; use rustc_data_structures::fx::FxHashSet;
use rustc_errors::{Applicability, DiagnosticBuilder}; use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_index::vec::IndexVec; use rustc_index::vec::IndexVec;
use rustc_infer::infer::NLLRegionVariableOrigin; use rustc_infer::infer::NllRegionVariableOrigin;
use rustc_middle::mir::{ use rustc_middle::mir::{
Body, CastKind, ConstraintCategory, FakeReadCause, Local, Location, Operand, Place, Rvalue, Body, CastKind, ConstraintCategory, FakeReadCause, Local, Location, Operand, Place, Rvalue,
Statement, StatementKind, TerminatorKind, Statement, StatementKind, TerminatorKind,
@ -258,7 +258,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let (category, from_closure, span) = self.regioncx.best_blame_constraint( let (category, from_closure, span) = self.regioncx.best_blame_constraint(
&self.body, &self.body,
borrow_region, borrow_region,
NLLRegionVariableOrigin::FreeRegion, NllRegionVariableOrigin::FreeRegion,
|r| self.regioncx.provides_universal_region(r, borrow_region, outlived_region), |r| self.regioncx.provides_universal_region(r, borrow_region, outlived_region),
); );

View File

@ -3,7 +3,7 @@
use rustc_errors::{Applicability, DiagnosticBuilder}; use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_infer::infer::{ use rustc_infer::infer::{
error_reporting::nice_region_error::NiceRegionError, error_reporting::nice_region_error::NiceRegionError,
error_reporting::unexpected_hidden_region_diagnostic, NLLRegionVariableOrigin, error_reporting::unexpected_hidden_region_diagnostic, NllRegionVariableOrigin,
}; };
use rustc_middle::mir::{ConstraintCategory, ReturnConstraint}; use rustc_middle::mir::{ConstraintCategory, ReturnConstraint};
use rustc_middle::ty::subst::Subst; use rustc_middle::ty::subst::Subst;
@ -75,13 +75,13 @@ crate enum RegionErrorKind<'tcx> {
/// The region element that erroneously must be outlived by `longer_fr`. /// The region element that erroneously must be outlived by `longer_fr`.
error_element: RegionElement, error_element: RegionElement,
/// The origin of the placeholder region. /// The origin of the placeholder region.
fr_origin: NLLRegionVariableOrigin, fr_origin: NllRegionVariableOrigin,
}, },
/// Any other lifetime error. /// Any other lifetime error.
RegionError { RegionError {
/// The origin of the region. /// The origin of the region.
fr_origin: NLLRegionVariableOrigin, fr_origin: NllRegionVariableOrigin,
/// The region that should outlive `shorter_fr`. /// The region that should outlive `shorter_fr`.
longer_fr: RegionVid, longer_fr: RegionVid,
/// The region that should be shorter, but we can't prove it. /// The region that should be shorter, but we can't prove it.
@ -269,7 +269,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
pub(in crate::borrow_check) fn report_region_error( pub(in crate::borrow_check) fn report_region_error(
&mut self, &mut self,
fr: RegionVid, fr: RegionVid,
fr_origin: NLLRegionVariableOrigin, fr_origin: NllRegionVariableOrigin,
outlived_fr: RegionVid, outlived_fr: RegionVid,
outlives_suggestion: &mut OutlivesSuggestionBuilder, outlives_suggestion: &mut OutlivesSuggestionBuilder,
) { ) {

View File

@ -5,7 +5,7 @@
use super::{OutlivesConstraint, RegionInferenceContext}; use super::{OutlivesConstraint, RegionInferenceContext};
use crate::borrow_check::type_check::Locations; use crate::borrow_check::type_check::Locations;
use rustc_infer::infer::NLLRegionVariableOrigin; use rustc_infer::infer::NllRegionVariableOrigin;
use rustc_middle::ty::TyCtxt; use rustc_middle::ty::TyCtxt;
use std::io::{self, Write}; use std::io::{self, Write};
@ -20,7 +20,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
writeln!(out, "| Free Region Mapping")?; writeln!(out, "| Free Region Mapping")?;
for region in self.regions() { for region in self.regions() {
if let NLLRegionVariableOrigin::FreeRegion = self.definitions[region].origin { if let NllRegionVariableOrigin::FreeRegion = self.definitions[region].origin {
let classification = self.universal_regions.region_classification(region).unwrap(); let classification = self.universal_regions.region_classification(region).unwrap();
let outlived_by = self.universal_region_relations.regions_outlived_by(region); let outlived_by = self.universal_region_relations.regions_outlived_by(region);
writeln!( writeln!(

View File

@ -9,7 +9,7 @@ use rustc_hir::def_id::DefId;
use rustc_index::vec::IndexVec; use rustc_index::vec::IndexVec;
use rustc_infer::infer::canonical::QueryOutlivesConstraint; use rustc_infer::infer::canonical::QueryOutlivesConstraint;
use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound}; use rustc_infer::infer::region_constraints::{GenericKind, VarInfos, VerifyBound};
use rustc_infer::infer::{InferCtxt, NLLRegionVariableOrigin, RegionVariableOrigin}; use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin, RegionVariableOrigin};
use rustc_middle::mir::{ use rustc_middle::mir::{
Body, ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureRegionRequirements, Body, ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureRegionRequirements,
ConstraintCategory, Local, Location, ReturnConstraint, ConstraintCategory, Local, Location, ReturnConstraint,
@ -143,9 +143,9 @@ pub(crate) struct AppliedMemberConstraint {
pub(crate) struct RegionDefinition<'tcx> { pub(crate) struct RegionDefinition<'tcx> {
/// What kind of variable is this -- a free region? existential /// What kind of variable is this -- a free region? existential
/// variable? etc. (See the `NLLRegionVariableOrigin` for more /// variable? etc. (See the `NllRegionVariableOrigin` for more
/// info.) /// info.)
pub(in crate::borrow_check) origin: NLLRegionVariableOrigin, pub(in crate::borrow_check) origin: NllRegionVariableOrigin,
/// Which universe is this region variable defined in? This is /// Which universe is this region variable defined in? This is
/// most often `ty::UniverseIndex::ROOT`, but when we encounter /// most often `ty::UniverseIndex::ROOT`, but when we encounter
@ -451,7 +451,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
let scc = self.constraint_sccs.scc(variable); let scc = self.constraint_sccs.scc(variable);
match self.definitions[variable].origin { match self.definitions[variable].origin {
NLLRegionVariableOrigin::FreeRegion => { NllRegionVariableOrigin::FreeRegion => {
// For each free, universally quantified region X: // For each free, universally quantified region X:
// Add all nodes in the CFG to liveness constraints // Add all nodes in the CFG to liveness constraints
@ -462,7 +462,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
self.scc_values.add_element(scc, variable); self.scc_values.add_element(scc, variable);
} }
NLLRegionVariableOrigin::Placeholder(placeholder) => { NllRegionVariableOrigin::Placeholder(placeholder) => {
// Each placeholder region is only visible from // Each placeholder region is only visible from
// its universe `ui` and its extensions. So we // its universe `ui` and its extensions. So we
// can't just add it into `scc` unless the // can't just add it into `scc` unless the
@ -480,8 +480,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
} }
} }
NLLRegionVariableOrigin::RootEmptyRegion NllRegionVariableOrigin::RootEmptyRegion
| NLLRegionVariableOrigin::Existential { .. } => { | NllRegionVariableOrigin::Existential { .. } => {
// For existential regions, nothing to do. // For existential regions, nothing to do.
} }
} }
@ -1348,7 +1348,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
) { ) {
for (fr, fr_definition) in self.definitions.iter_enumerated() { for (fr, fr_definition) in self.definitions.iter_enumerated() {
match fr_definition.origin { match fr_definition.origin {
NLLRegionVariableOrigin::FreeRegion => { NllRegionVariableOrigin::FreeRegion => {
// Go through each of the universal regions `fr` and check that // Go through each of the universal regions `fr` and check that
// they did not grow too large, accumulating any requirements // they did not grow too large, accumulating any requirements
// for our caller into the `outlives_requirements` vector. // for our caller into the `outlives_requirements` vector.
@ -1360,12 +1360,12 @@ impl<'tcx> RegionInferenceContext<'tcx> {
); );
} }
NLLRegionVariableOrigin::Placeholder(placeholder) => { NllRegionVariableOrigin::Placeholder(placeholder) => {
self.check_bound_universal_region(fr, placeholder, errors_buffer); self.check_bound_universal_region(fr, placeholder, errors_buffer);
} }
NLLRegionVariableOrigin::RootEmptyRegion NllRegionVariableOrigin::RootEmptyRegion
| NLLRegionVariableOrigin::Existential { .. } => { | NllRegionVariableOrigin::Existential { .. } => {
// nothing to check here // nothing to check here
} }
} }
@ -1449,7 +1449,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
errors_buffer.push(RegionErrorKind::RegionError { errors_buffer.push(RegionErrorKind::RegionError {
longer_fr: *longer_fr, longer_fr: *longer_fr,
shorter_fr: *shorter_fr, shorter_fr: *shorter_fr,
fr_origin: NLLRegionVariableOrigin::FreeRegion, fr_origin: NllRegionVariableOrigin::FreeRegion,
is_reported: true, is_reported: true,
}); });
} }
@ -1459,16 +1459,16 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// a more complete picture on how to separate this responsibility. // a more complete picture on how to separate this responsibility.
for (fr, fr_definition) in self.definitions.iter_enumerated() { for (fr, fr_definition) in self.definitions.iter_enumerated() {
match fr_definition.origin { match fr_definition.origin {
NLLRegionVariableOrigin::FreeRegion => { NllRegionVariableOrigin::FreeRegion => {
// handled by polonius above // handled by polonius above
} }
NLLRegionVariableOrigin::Placeholder(placeholder) => { NllRegionVariableOrigin::Placeholder(placeholder) => {
self.check_bound_universal_region(fr, placeholder, errors_buffer); self.check_bound_universal_region(fr, placeholder, errors_buffer);
} }
NLLRegionVariableOrigin::RootEmptyRegion NllRegionVariableOrigin::RootEmptyRegion
| NLLRegionVariableOrigin::Existential { .. } => { | NllRegionVariableOrigin::Existential { .. } => {
// nothing to check here // nothing to check here
} }
} }
@ -1516,7 +1516,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
errors_buffer.push(RegionErrorKind::RegionError { errors_buffer.push(RegionErrorKind::RegionError {
longer_fr, longer_fr,
shorter_fr: representative, shorter_fr: representative,
fr_origin: NLLRegionVariableOrigin::FreeRegion, fr_origin: NllRegionVariableOrigin::FreeRegion,
is_reported: true, is_reported: true,
}); });
} }
@ -1539,7 +1539,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
errors_buffer.push(RegionErrorKind::RegionError { errors_buffer.push(RegionErrorKind::RegionError {
longer_fr, longer_fr,
shorter_fr, shorter_fr,
fr_origin: NLLRegionVariableOrigin::FreeRegion, fr_origin: NllRegionVariableOrigin::FreeRegion,
is_reported: !error_reported, is_reported: !error_reported,
}); });
@ -1597,7 +1597,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
let blame_span_category = self.find_outlives_blame_span( let blame_span_category = self.find_outlives_blame_span(
body, body,
longer_fr, longer_fr,
NLLRegionVariableOrigin::FreeRegion, NllRegionVariableOrigin::FreeRegion,
shorter_fr, shorter_fr,
); );
@ -1656,7 +1656,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
errors_buffer.push(RegionErrorKind::BoundUniversalRegionError { errors_buffer.push(RegionErrorKind::BoundUniversalRegionError {
longer_fr, longer_fr,
error_element, error_element,
fr_origin: NLLRegionVariableOrigin::Placeholder(placeholder), fr_origin: NllRegionVariableOrigin::Placeholder(placeholder),
}); });
} }
@ -1732,7 +1732,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
debug!("cannot_name_value_of(r1={:?}, r2={:?})", r1, r2); debug!("cannot_name_value_of(r1={:?}, r2={:?})", r1, r2);
match self.definitions[r2].origin { match self.definitions[r2].origin {
NLLRegionVariableOrigin::Placeholder(placeholder) => { NllRegionVariableOrigin::Placeholder(placeholder) => {
let universe1 = self.definitions[r1].universe; let universe1 = self.definitions[r1].universe;
debug!( debug!(
"cannot_name_value_of: universe1={:?} placeholder={:?}", "cannot_name_value_of: universe1={:?} placeholder={:?}",
@ -1741,9 +1741,9 @@ impl<'tcx> RegionInferenceContext<'tcx> {
universe1.cannot_name(placeholder.universe) universe1.cannot_name(placeholder.universe)
} }
NLLRegionVariableOrigin::RootEmptyRegion NllRegionVariableOrigin::RootEmptyRegion
| NLLRegionVariableOrigin::FreeRegion | NllRegionVariableOrigin::FreeRegion
| NLLRegionVariableOrigin::Existential { .. } => false, | NllRegionVariableOrigin::Existential { .. } => false,
} }
} }
@ -1771,7 +1771,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
&self, &self,
body: &Body<'tcx>, body: &Body<'tcx>,
fr1: RegionVid, fr1: RegionVid,
fr1_origin: NLLRegionVariableOrigin, fr1_origin: NllRegionVariableOrigin,
fr2: RegionVid, fr2: RegionVid,
) -> (ConstraintCategory, Span) { ) -> (ConstraintCategory, Span) {
let (category, _, span) = self.best_blame_constraint(body, fr1, fr1_origin, |r| { let (category, _, span) = self.best_blame_constraint(body, fr1, fr1_origin, |r| {
@ -1933,7 +1933,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
.definitions .definitions
.iter_enumerated() .iter_enumerated()
.find_map(|(r, definition)| match definition.origin { .find_map(|(r, definition)| match definition.origin {
NLLRegionVariableOrigin::Placeholder(p) if p == error_placeholder => Some(r), NllRegionVariableOrigin::Placeholder(p) if p == error_placeholder => Some(r),
_ => None, _ => None,
}) })
.unwrap(), .unwrap(),
@ -1965,7 +1965,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
&self, &self,
body: &Body<'tcx>, body: &Body<'tcx>,
from_region: RegionVid, from_region: RegionVid,
from_region_origin: NLLRegionVariableOrigin, from_region_origin: NllRegionVariableOrigin,
target_test: impl Fn(RegionVid) -> bool, target_test: impl Fn(RegionVid) -> bool,
) -> (ConstraintCategory, bool, Span) { ) -> (ConstraintCategory, bool, Span) {
debug!( debug!(
@ -2059,11 +2059,11 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// //
// and here we prefer to blame the source (the y = x statement). // and here we prefer to blame the source (the y = x statement).
let blame_source = match from_region_origin { let blame_source = match from_region_origin {
NLLRegionVariableOrigin::FreeRegion NllRegionVariableOrigin::FreeRegion
| NLLRegionVariableOrigin::Existential { from_forall: false } => true, | NllRegionVariableOrigin::Existential { from_forall: false } => true,
NLLRegionVariableOrigin::RootEmptyRegion NllRegionVariableOrigin::RootEmptyRegion
| NLLRegionVariableOrigin::Placeholder(_) | NllRegionVariableOrigin::Placeholder(_)
| NLLRegionVariableOrigin::Existential { from_forall: true } => false, | NllRegionVariableOrigin::Existential { from_forall: true } => false,
}; };
let find_region = |i: &usize| { let find_region = |i: &usize| {
@ -2144,8 +2144,8 @@ impl<'tcx> RegionDefinition<'tcx> {
// `init_universal_regions`. // `init_universal_regions`.
let origin = match rv_origin { let origin = match rv_origin {
RegionVariableOrigin::NLL(origin) => origin, RegionVariableOrigin::Nll(origin) => origin,
_ => NLLRegionVariableOrigin::Existential { from_forall: false }, _ => NllRegionVariableOrigin::Existential { from_forall: false },
}; };
Self { origin, universe, external_name: None } Self { origin, universe, external_name: None }

View File

@ -1,5 +1,5 @@
use rustc_index::vec::IndexVec; use rustc_index::vec::IndexVec;
use rustc_infer::infer::{InferCtxt, NLLRegionVariableOrigin}; use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin};
use rustc_middle::mir::visit::{MutVisitor, TyContext}; use rustc_middle::mir::visit::{MutVisitor, TyContext};
use rustc_middle::mir::{Body, Location, PlaceElem, Promoted}; use rustc_middle::mir::{Body, Location, PlaceElem, Promoted};
use rustc_middle::ty::subst::SubstsRef; use rustc_middle::ty::subst::SubstsRef;
@ -15,7 +15,7 @@ pub fn renumber_mir<'tcx>(
debug!("renumber_mir()"); debug!("renumber_mir()");
debug!("renumber_mir: body.arg_count={:?}", body.arg_count); debug!("renumber_mir: body.arg_count={:?}", body.arg_count);
let mut visitor = NLLVisitor { infcx }; let mut visitor = NllVisitor { infcx };
for body in promoted.iter_mut() { for body in promoted.iter_mut() {
visitor.visit_body(body); visitor.visit_body(body);
@ -33,16 +33,16 @@ where
debug!("renumber_regions(value={:?})", value); debug!("renumber_regions(value={:?})", value);
infcx.tcx.fold_regions(value, &mut false, |_region, _depth| { infcx.tcx.fold_regions(value, &mut false, |_region, _depth| {
let origin = NLLRegionVariableOrigin::Existential { from_forall: false }; let origin = NllRegionVariableOrigin::Existential { from_forall: false };
infcx.next_nll_region_var(origin) infcx.next_nll_region_var(origin)
}) })
} }
struct NLLVisitor<'a, 'tcx> { struct NllVisitor<'a, 'tcx> {
infcx: &'a InferCtxt<'a, 'tcx>, infcx: &'a InferCtxt<'a, 'tcx>,
} }
impl<'a, 'tcx> NLLVisitor<'a, 'tcx> { impl<'a, 'tcx> NllVisitor<'a, 'tcx> {
fn renumber_regions<T>(&mut self, value: T) -> T fn renumber_regions<T>(&mut self, value: T) -> T
where where
T: TypeFoldable<'tcx>, T: TypeFoldable<'tcx>,
@ -51,7 +51,7 @@ impl<'a, 'tcx> NLLVisitor<'a, 'tcx> {
} }
} }
impl<'a, 'tcx> MutVisitor<'tcx> for NLLVisitor<'a, 'tcx> { impl<'a, 'tcx> MutVisitor<'tcx> for NllVisitor<'a, 'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> { fn tcx(&self) -> TyCtxt<'tcx> {
self.infcx.tcx self.infcx.tcx
} }

View File

@ -16,7 +16,7 @@ use rustc_infer::infer::canonical::QueryRegionConstraints;
use rustc_infer::infer::outlives::env::RegionBoundPairs; use rustc_infer::infer::outlives::env::RegionBoundPairs;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_infer::infer::{ use rustc_infer::infer::{
InferCtxt, InferOk, LateBoundRegionConversionTime, NLLRegionVariableOrigin, InferCtxt, InferOk, LateBoundRegionConversionTime, NllRegionVariableOrigin,
}; };
use rustc_middle::mir::tcx::PlaceTy; use rustc_middle::mir::tcx::PlaceTy;
use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor}; use rustc_middle::mir::visit::{NonMutatingUseContext, PlaceContext, Visitor};
@ -872,7 +872,7 @@ impl MirTypeckRegionConstraints<'tcx> {
match self.placeholder_index_to_region.get(placeholder_index) { match self.placeholder_index_to_region.get(placeholder_index) {
Some(&v) => v, Some(&v) => v,
None => { None => {
let origin = NLLRegionVariableOrigin::Placeholder(placeholder); let origin = NllRegionVariableOrigin::Placeholder(placeholder);
let region = infcx.next_nll_region_var_in_universe(origin, placeholder.universe); let region = infcx.next_nll_region_var_in_universe(origin, placeholder.universe);
self.placeholder_index_to_region.push(region); self.placeholder_index_to_region.push(region);
region region

View File

@ -1,5 +1,5 @@
use rustc_infer::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate}; use rustc_infer::infer::nll_relate::{NormalizationStrategy, TypeRelating, TypeRelatingDelegate};
use rustc_infer::infer::{InferCtxt, NLLRegionVariableOrigin}; use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin};
use rustc_middle::mir::ConstraintCategory; use rustc_middle::mir::ConstraintCategory;
use rustc_middle::ty::relate::TypeRelation; use rustc_middle::ty::relate::TypeRelation;
use rustc_middle::ty::{self, Const, Ty}; use rustc_middle::ty::{self, Const, Ty};
@ -64,7 +64,7 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> { fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> {
if self.borrowck_context.is_some() { if self.borrowck_context.is_some() {
let origin = NLLRegionVariableOrigin::Existential { from_forall }; let origin = NllRegionVariableOrigin::Existential { from_forall };
self.infcx.next_nll_region_var(origin) self.infcx.next_nll_region_var(origin)
} else { } else {
self.infcx.tcx.lifetimes.re_erased self.infcx.tcx.lifetimes.re_erased
@ -81,7 +81,7 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> { fn generalize_existential(&mut self, universe: ty::UniverseIndex) -> ty::Region<'tcx> {
self.infcx.next_nll_region_var_in_universe( self.infcx.next_nll_region_var_in_universe(
NLLRegionVariableOrigin::Existential { from_forall: false }, NllRegionVariableOrigin::Existential { from_forall: false },
universe, universe,
) )
} }

View File

@ -20,7 +20,7 @@ use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::lang_items::LangItem; use rustc_hir::lang_items::LangItem;
use rustc_hir::{BodyOwnerKind, HirId}; use rustc_hir::{BodyOwnerKind, HirId};
use rustc_index::vec::{Idx, IndexVec}; use rustc_index::vec::{Idx, IndexVec};
use rustc_infer::infer::{InferCtxt, NLLRegionVariableOrigin}; use rustc_infer::infer::{InferCtxt, NllRegionVariableOrigin};
use rustc_middle::ty::fold::TypeFoldable; use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::subst::{InternalSubsts, Subst, SubstsRef}; use rustc_middle::ty::subst::{InternalSubsts, Subst, SubstsRef};
use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt}; use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt};
@ -393,7 +393,7 @@ struct UniversalRegionsBuilder<'cx, 'tcx> {
param_env: ty::ParamEnv<'tcx>, param_env: ty::ParamEnv<'tcx>,
} }
const FR: NLLRegionVariableOrigin = NLLRegionVariableOrigin::FreeRegion; const FR: NllRegionVariableOrigin = NllRegionVariableOrigin::FreeRegion;
impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> { impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
fn build(self) -> UniversalRegions<'tcx> { fn build(self) -> UniversalRegions<'tcx> {
@ -486,7 +486,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
let root_empty = self let root_empty = self
.infcx .infcx
.next_nll_region_var(NLLRegionVariableOrigin::RootEmptyRegion) .next_nll_region_var(NllRegionVariableOrigin::RootEmptyRegion)
.to_region_vid(); .to_region_vid();
UniversalRegions { UniversalRegions {
@ -647,7 +647,7 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
trait InferCtxtExt<'tcx> { trait InferCtxtExt<'tcx> {
fn replace_free_regions_with_nll_infer_vars<T>( fn replace_free_regions_with_nll_infer_vars<T>(
&self, &self,
origin: NLLRegionVariableOrigin, origin: NllRegionVariableOrigin,
value: T, value: T,
) -> T ) -> T
where where
@ -655,7 +655,7 @@ trait InferCtxtExt<'tcx> {
fn replace_bound_regions_with_nll_infer_vars<T>( fn replace_bound_regions_with_nll_infer_vars<T>(
&self, &self,
origin: NLLRegionVariableOrigin, origin: NllRegionVariableOrigin,
all_outlive_scope: LocalDefId, all_outlive_scope: LocalDefId,
value: ty::Binder<T>, value: ty::Binder<T>,
indices: &mut UniversalRegionIndices<'tcx>, indices: &mut UniversalRegionIndices<'tcx>,
@ -673,7 +673,7 @@ trait InferCtxtExt<'tcx> {
impl<'cx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'tcx> { impl<'cx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'tcx> {
fn replace_free_regions_with_nll_infer_vars<T>( fn replace_free_regions_with_nll_infer_vars<T>(
&self, &self,
origin: NLLRegionVariableOrigin, origin: NllRegionVariableOrigin,
value: T, value: T,
) -> T ) -> T
where where
@ -684,7 +684,7 @@ impl<'cx, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'cx, 'tcx> {
fn replace_bound_regions_with_nll_infer_vars<T>( fn replace_bound_regions_with_nll_infer_vars<T>(
&self, &self,
origin: NLLRegionVariableOrigin, origin: NllRegionVariableOrigin,
all_outlive_scope: LocalDefId, all_outlive_scope: LocalDefId,
value: ty::Binder<T>, value: ty::Binder<T>,
indices: &mut UniversalRegionIndices<'tcx>, indices: &mut UniversalRegionIndices<'tcx>,

View File

@ -82,8 +82,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// visible through borrow checking. False edges ensure that the CFG as /// visible through borrow checking. False edges ensure that the CFG as
/// seen by borrow checking doesn't encode this. False edges are added: /// seen by borrow checking doesn't encode this. False edges are added:
/// ///
/// * From each prebinding block to the next prebinding block. /// * From each pre-binding block to the next pre-binding block.
/// * From each otherwise block to the next prebinding block. /// * From each otherwise block to the next pre-binding block.
crate fn match_expr( crate fn match_expr(
&mut self, &mut self,
destination: Place<'tcx>, destination: Place<'tcx>,
@ -630,10 +630,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
#[derive(Debug)] #[derive(Debug)]
pub(super) struct Candidate<'pat, 'tcx> { pub(super) struct Candidate<'pat, 'tcx> {
/// `Span` of the original pattern that gave rise to this candidate /// [`Span`] of the original pattern that gave rise to this candidate.
span: Span, span: Span,
/// This `Candidate` has a guard. /// Whether this `Candidate` has a guard.
has_guard: bool, has_guard: bool,
/// All of these must be satisfied... /// All of these must be satisfied...
@ -645,14 +645,15 @@ pub(super) struct Candidate<'pat, 'tcx> {
/// ...and these types asserted... /// ...and these types asserted...
ascriptions: Vec<Ascription<'tcx>>, ascriptions: Vec<Ascription<'tcx>>,
/// ... and if this is non-empty, one of these subcandidates also has to match ... /// ...and if this is non-empty, one of these subcandidates also has to match...
subcandidates: Vec<Candidate<'pat, 'tcx>>, subcandidates: Vec<Candidate<'pat, 'tcx>>,
/// ...and the guard must be evaluated, if false branch to Block... /// ...and the guard must be evaluated; if it's `false` then branch to `otherwise_block`.
otherwise_block: Option<BasicBlock>, otherwise_block: Option<BasicBlock>,
/// ...and the blocks for add false edges between candidates /// The block before the `bindings` have been established.
pre_binding_block: Option<BasicBlock>, pre_binding_block: Option<BasicBlock>,
/// The pre-binding block of the next candidate.
next_candidate_pre_binding_block: Option<BasicBlock>, next_candidate_pre_binding_block: Option<BasicBlock>,
} }
@ -737,18 +738,19 @@ crate struct MatchPair<'pat, 'tcx> {
pattern: &'pat Pat<'tcx>, pattern: &'pat Pat<'tcx>,
} }
/// See [`Test`] for more.
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq)]
enum TestKind<'tcx> { enum TestKind<'tcx> {
/// Test the branches of enum. /// Test what enum variant a value is.
Switch { Switch {
/// The enum being tested /// The enum type being tested.
adt_def: &'tcx ty::AdtDef, adt_def: &'tcx ty::AdtDef,
/// The set of variants that we should create a branch for. We also /// The set of variants that we should create a branch for. We also
/// create an additional "otherwise" case. /// create an additional "otherwise" case.
variants: BitSet<VariantIdx>, variants: BitSet<VariantIdx>,
}, },
/// Test what value an `integer`, `bool` or `char` has. /// Test what value an integer, `bool`, or `char` has.
SwitchInt { SwitchInt {
/// The type of the value that we're testing. /// The type of the value that we're testing.
switch_ty: Ty<'tcx>, switch_ty: Ty<'tcx>,
@ -756,7 +758,7 @@ enum TestKind<'tcx> {
/// ///
/// For integers and `char`s we create a branch to each of the values in /// For integers and `char`s we create a branch to each of the values in
/// `options`, as well as an "otherwise" branch for all other values, even /// `options`, as well as an "otherwise" branch for all other values, even
/// in the (rare) case that options is exhaustive. /// in the (rare) case that `options` is exhaustive.
/// ///
/// For `bool` we always generate two edges, one for `true` and one for /// For `bool` we always generate two edges, one for `true` and one for
/// `false`. /// `false`.
@ -776,17 +778,21 @@ enum TestKind<'tcx> {
/// Test whether the value falls within an inclusive or exclusive range /// Test whether the value falls within an inclusive or exclusive range
Range(PatRange<'tcx>), Range(PatRange<'tcx>),
/// Test length of the slice is equal to len /// Test that the length of the slice is equal to `len`.
Len { len: u64, op: BinOp }, Len { len: u64, op: BinOp },
} }
/// A test to perform to determine which [`Candidate`] matches a value.
///
/// [`Test`] is just the test to perform; it does not include the value
/// to be tested.
#[derive(Debug)] #[derive(Debug)]
crate struct Test<'tcx> { crate struct Test<'tcx> {
span: Span, span: Span,
kind: TestKind<'tcx>, kind: TestKind<'tcx>,
} }
/// ArmHasGuard is isomorphic to a boolean flag. It indicates whether /// `ArmHasGuard` is a wrapper around a boolean flag. It indicates whether
/// a match arm has a guard expression attached to it. /// a match arm has a guard expression attached to it.
#[derive(Copy, Clone, Debug)] #[derive(Copy, Clone, Debug)]
crate struct ArmHasGuard(crate bool); crate struct ArmHasGuard(crate bool);
@ -801,27 +807,27 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// candidates are sorted such that the first item in the list /// candidates are sorted such that the first item in the list
/// has the highest priority. When a candidate is found to match /// has the highest priority. When a candidate is found to match
/// the value, we will set and generate a branch to the appropriate /// the value, we will set and generate a branch to the appropriate
/// prebinding block. /// pre-binding block.
/// ///
/// If we find that *NONE* of the candidates apply, we branch to the /// If we find that *NONE* of the candidates apply, we branch to the
/// `otherwise_block`, setting it to `Some` if required. In principle, this /// `otherwise_block`, setting it to `Some` if required. In principle, this
/// means that the input list was not exhaustive, though at present we /// means that the input list was not exhaustive, though at present we
/// sometimes are not smart enough to recognize all exhaustive inputs. /// sometimes are not smart enough to recognize all exhaustive inputs.
/// ///
/// It might be surprising that the input can be inexhaustive. /// It might be surprising that the input can be non-exhaustive.
/// Indeed, initially, it is not, because all matches are /// Indeed, initially, it is not, because all matches are
/// exhaustive in Rust. But during processing we sometimes divide /// exhaustive in Rust. But during processing we sometimes divide
/// up the list of candidates and recurse with a non-exhaustive /// up the list of candidates and recurse with a non-exhaustive
/// list. This is important to keep the size of the generated code /// list. This is important to keep the size of the generated code
/// under control. See `test_candidates` for more details. /// under control. See [`Builder::test_candidates`] for more details.
/// ///
/// If `fake_borrows` is Some, then places which need fake borrows /// If `fake_borrows` is `Some`, then places which need fake borrows
/// will be added to it. /// will be added to it.
/// ///
/// For an example of a case where we set `otherwise_block`, even for an /// For an example of a case where we set `otherwise_block`, even for an
/// exhaustive match consider: /// exhaustive match, consider:
/// ///
/// ```rust /// ```
/// match x { /// match x {
/// (true, true) => (), /// (true, true) => (),
/// (_, false) => (), /// (_, false) => (),
@ -830,8 +836,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// ``` /// ```
/// ///
/// For this match, we check if `x.0` matches `true` (for the first /// For this match, we check if `x.0` matches `true` (for the first
/// arm). If that's false, we check `x.1`. If it's `true` we check if /// arm). If it doesn't match, we check `x.1`. If `x.1` is `true` we check
/// `x.0` matches `false` (for the third arm). In the (impossible at /// if `x.0` matches `false` (for the third arm). In the (impossible at
/// runtime) case when `x.0` is now `true`, we branch to /// runtime) case when `x.0` is now `true`, we branch to
/// `otherwise_block`. /// `otherwise_block`.
fn match_candidates<'pat>( fn match_candidates<'pat>(
@ -938,26 +944,31 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
); );
} }
/// Link up matched candidates. For example, if we have something like /// Link up matched candidates.
/// this: ///
/// For example, if we have something like this:
/// ///
/// ```rust /// ```rust
/// ... /// ...
/// Some(x) if cond => ... /// Some(x) if cond1 => ...
/// Some(x) => ... /// Some(x) => ...
/// Some(x) if cond => ... /// Some(x) if cond2 => ...
/// ... /// ...
/// ``` /// ```
/// ///
/// We generate real edges from: /// We generate real edges from:
/// * `start_block` to the `prebinding_block` of the first pattern,
/// * the otherwise block of the first pattern to the second pattern,
/// * the otherwise block of the third pattern to the a block with an
/// Unreachable terminator.
/// ///
/// As well as that we add fake edges from the otherwise blocks to the /// * `start_block` to the [pre-binding block] of the first pattern,
/// prebinding block of the next candidate in the original set of /// * the [otherwise block] of the first pattern to the second pattern,
/// * the [otherwise block] of the third pattern to a block with an
/// [`Unreachable` terminator](TerminatorKind::Unreachable).
///
/// In addition, we add fake edges from the otherwise blocks to the
/// pre-binding block of the next candidate in the original set of
/// candidates. /// candidates.
///
/// [pre-binding block]: Candidate::pre_binding_block
/// [otherwise block]: Candidate::otherwise_block
fn select_matched_candidates( fn select_matched_candidates(
&mut self, &mut self,
matched_candidates: &mut [&mut Candidate<'_, 'tcx>], matched_candidates: &mut [&mut Candidate<'_, 'tcx>],
@ -1044,7 +1055,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// forwards to [Builder::test_candidates]. /// forwards to [Builder::test_candidates].
/// ///
/// Given a pattern `(P | Q, R | S)` we (in principle) generate a CFG like /// Given a pattern `(P | Q, R | S)` we (in principle) generate a CFG like
/// so /// so:
/// ///
/// ```text /// ```text
/// [ start ] /// [ start ]
@ -1214,10 +1225,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// This is the most subtle part of the matching algorithm. At /// This is the most subtle part of the matching algorithm. At
/// this point, the input candidates have been fully simplified, /// this point, the input candidates have been fully simplified,
/// and so we know that all remaining match-pairs require some /// and so we know that all remaining match-pairs require some
/// sort of test. To decide what test to do, we take the highest /// sort of test. To decide what test to perform, we take the highest
/// priority candidate (last one in the list) and extract the /// priority candidate (the first one in the list, as of January 2021)
/// first match-pair from the list. From this we decide what kind /// and extract the first match-pair from the list. From this we decide
/// of test is needed using `test`, defined in the `test` module. /// what kind of test is needed using [`Builder::test`], defined in the
/// [`test` module](mod@test).
/// ///
/// *Note:* taking the first match pair is somewhat arbitrary, and /// *Note:* taking the first match pair is somewhat arbitrary, and
/// we might do better here by choosing more carefully what to /// we might do better here by choosing more carefully what to
@ -1225,20 +1237,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// ///
/// For example, consider the following possible match-pairs: /// For example, consider the following possible match-pairs:
/// ///
/// 1. `x @ Some(P)` -- we will do a `Switch` to decide what variant `x` has /// 1. `x @ Some(P)` -- we will do a [`Switch`] to decide what variant `x` has
/// 2. `x @ 22` -- we will do a `SwitchInt` /// 2. `x @ 22` -- we will do a [`SwitchInt`] to decide what value `x` has
/// 3. `x @ 3..5` -- we will do a range test /// 3. `x @ 3..5` -- we will do a [`Range`] test to decide what range `x` falls in
/// 4. etc. /// 4. etc.
/// ///
/// [`Switch`]: TestKind::Switch
/// [`SwitchInt`]: TestKind::SwitchInt
/// [`Range`]: TestKind::Range
///
/// Once we know what sort of test we are going to perform, this /// Once we know what sort of test we are going to perform, this
/// Tests may also help us with other candidates. So we walk over /// test may also help us winnow down our candidates. So we walk over
/// the candidates (from high to low priority) and check. This /// the candidates (from high to low priority) and check. This
/// gives us, for each outcome of the test, a transformed list of /// gives us, for each outcome of the test, a transformed list of
/// candidates. For example, if we are testing the current /// candidates. For example, if we are testing `x.0`'s variant,
/// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1 /// and we have a candidate `(x.0 @ Some(v), x.1 @ 22)`,
/// @ 22}`, then we would have a resulting candidate of `{(x.0 as /// then we would have a resulting candidate of `((x.0 as Some).0 @ v, x.1 @ 22)`.
/// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now /// Note that the first match-pair is now simpler (and, in fact, irrefutable).
/// simpler (and, in fact, irrefutable).
/// ///
/// But there may also be candidates that the test just doesn't /// But there may also be candidates that the test just doesn't
/// apply to. The classical example involves wildcards: /// apply to. The classical example involves wildcards:
@ -1268,7 +1283,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// is trivially NP-complete: /// is trivially NP-complete:
/// ///
/// ```rust /// ```rust
/// match (var0, var1, var2, var3, ..) { /// match (var0, var1, var2, var3, ...) {
/// (true, _, _, false, true, ...) => false, /// (true, _, _, false, true, ...) => false,
/// (_, true, true, false, _, ...) => false, /// (_, true, true, false, _, ...) => false,
/// (false, _, false, false, _, ...) => false, /// (false, _, false, false, _, ...) => false,
@ -1283,7 +1298,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// ///
/// That kind of exponential worst-case might not occur in practice, but /// That kind of exponential worst-case might not occur in practice, but
/// our simplistic treatment of constants and guards would make it occur /// our simplistic treatment of constants and guards would make it occur
/// in very common situations - for example #29740: /// in very common situations - for example [#29740]:
/// ///
/// ```rust /// ```rust
/// match x { /// match x {
@ -1294,13 +1309,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// } /// }
/// ``` /// ```
/// ///
/// Here we first test the match-pair `x @ "foo"`, which is an `Eq` test. /// [#29740]: https://github.com/rust-lang/rust/issues/29740
///
/// Here we first test the match-pair `x @ "foo"`, which is an [`Eq` test].
///
/// [`Eq` test]: TestKind::Eq
/// ///
/// It might seem that we would end up with 2 disjoint candidate /// It might seem that we would end up with 2 disjoint candidate
/// sets, consisting of the first candidate or the other 3, but our /// sets, consisting of the first candidate or the other two, but our
/// algorithm doesn't reason about "foo" being distinct from the other /// algorithm doesn't reason about `"foo"` being distinct from the other
/// constants; it considers the latter arms to potentially match after /// constants; it considers the latter arms to potentially match after
/// both outcomes, which obviously leads to an exponential amount /// both outcomes, which obviously leads to an exponential number
/// of tests. /// of tests.
/// ///
/// To avoid these kinds of problems, our algorithm tries to ensure /// To avoid these kinds of problems, our algorithm tries to ensure
@ -1312,16 +1331,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
/// ///
/// After we perform our test, we branch into the appropriate candidate /// After we perform our test, we branch into the appropriate candidate
/// set and recurse with `match_candidates`. These sub-matches are /// set and recurse with `match_candidates`. These sub-matches are
/// obviously inexhaustive - as we discarded our otherwise set - so /// obviously non-exhaustive - as we discarded our otherwise set - so
/// we set their continuation to do `match_candidates` on the /// we set their continuation to do `match_candidates` on the
/// "unmatched" set (which is again inexhaustive). /// "unmatched" set (which is again non-exhaustive).
/// ///
/// If you apply this to the above test, you basically wind up /// If you apply this to the above test, you basically wind up
/// with an if-else-if chain, testing each candidate in turn, /// with an if-else-if chain, testing each candidate in turn,
/// which is precisely what we want. /// which is precisely what we want.
/// ///
/// In addition to avoiding exponential-time blowups, this algorithm /// In addition to avoiding exponential-time blowups, this algorithm
/// also has nice property that each guard and arm is only generated /// also has the nice property that each guard and arm is only generated
/// once. /// once.
fn test_candidates<'pat, 'b, 'c>( fn test_candidates<'pat, 'b, 'c>(
&mut self, &mut self,

View File

@ -23,7 +23,7 @@ use std::cmp::Ordering;
impl<'a, 'tcx> Builder<'a, 'tcx> { impl<'a, 'tcx> Builder<'a, 'tcx> {
/// Identifies what test is needed to decide if `match_pair` is applicable. /// Identifies what test is needed to decide if `match_pair` is applicable.
/// ///
/// It is a bug to call this with a simplifiable pattern. /// It is a bug to call this with a not-fully-simplified pattern.
pub(super) fn test<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> Test<'tcx> { pub(super) fn test<'pat>(&mut self, match_pair: &MatchPair<'pat, 'tcx>) -> Test<'tcx> {
match *match_pair.pattern.kind { match *match_pair.pattern.kind {
PatKind::Variant { ref adt_def, substs: _, variant_index: _, subpatterns: _ } => Test { PatKind::Variant { ref adt_def, substs: _, variant_index: _, subpatterns: _ } => Test {

View File

@ -13,4 +13,5 @@ rustc_typeck = { path = "../rustc_typeck" }
rustc_session = { path = "../rustc_session" } rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" } rustc_span = { path = "../rustc_span" }
rustc_data_structures = { path = "../rustc_data_structures" } rustc_data_structures = { path = "../rustc_data_structures" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
tracing = "0.1" tracing = "0.1"

View File

@ -18,15 +18,17 @@ use rustc_hir::{AssocItemKind, HirIdSet, Node, PatKind};
use rustc_middle::bug; use rustc_middle::bug;
use rustc_middle::hir::map::Map; use rustc_middle::hir::map::Map;
use rustc_middle::middle::privacy::{AccessLevel, AccessLevels}; use rustc_middle::middle::privacy::{AccessLevel, AccessLevels};
use rustc_middle::mir::abstract_const::Node as ACNode;
use rustc_middle::span_bug; use rustc_middle::span_bug;
use rustc_middle::ty::fold::TypeVisitor; use rustc_middle::ty::fold::TypeVisitor;
use rustc_middle::ty::query::Providers; use rustc_middle::ty::query::Providers;
use rustc_middle::ty::subst::InternalSubsts; use rustc_middle::ty::subst::{InternalSubsts, Subst};
use rustc_middle::ty::{self, GenericParamDefKind, TraitRef, Ty, TyCtxt, TypeFoldable}; use rustc_middle::ty::{self, Const, GenericParamDefKind, TraitRef, Ty, TyCtxt, TypeFoldable};
use rustc_session::lint; use rustc_session::lint;
use rustc_span::hygiene::Transparency; use rustc_span::hygiene::Transparency;
use rustc_span::symbol::{kw, Ident}; use rustc_span::symbol::{kw, Ident};
use rustc_span::Span; use rustc_span::Span;
use rustc_trait_selection::traits::const_evaluatable::{self, AbstractConst};
use std::marker::PhantomData; use std::marker::PhantomData;
use std::ops::ControlFlow; use std::ops::ControlFlow;
@ -112,19 +114,35 @@ where
ty.visit_with(self) ty.visit_with(self)
} }
ty::PredicateKind::RegionOutlives(..) => ControlFlow::CONTINUE, ty::PredicateKind::RegionOutlives(..) => ControlFlow::CONTINUE,
ty::PredicateKind::ConstEvaluatable(..) ty::PredicateKind::ConstEvaluatable(defs, substs)
if self.def_id_visitor.tcx().features().const_evaluatable_checked => if self.def_id_visitor.tcx().features().const_evaluatable_checked =>
{ {
// FIXME(const_evaluatable_checked): If the constant used here depends on a let tcx = self.def_id_visitor.tcx();
// private function we may have to do something here... if let Ok(Some(ct)) = AbstractConst::new(tcx, defs, substs) {
// self.visit_abstract_const_expr(tcx, ct)?;
// For now, let's just pretend that everything is fine. }
ControlFlow::CONTINUE ControlFlow::CONTINUE
} }
_ => bug!("unexpected predicate: {:?}", predicate), _ => bug!("unexpected predicate: {:?}", predicate),
} }
} }
fn visit_abstract_const_expr(
&mut self,
tcx: TyCtxt<'tcx>,
ct: AbstractConst<'tcx>,
) -> ControlFlow<V::BreakTy> {
const_evaluatable::walk_abstract_const(tcx, ct, |node| match node {
ACNode::Leaf(leaf) => {
let leaf = leaf.subst(tcx, ct.substs);
self.visit_const(leaf)
}
ACNode::Binop(..) | ACNode::UnaryOp(..) | ACNode::FunctionCall(_, _) => {
ControlFlow::CONTINUE
}
})
}
fn visit_predicates( fn visit_predicates(
&mut self, &mut self,
predicates: ty::GenericPredicates<'tcx>, predicates: ty::GenericPredicates<'tcx>,
@ -241,6 +259,15 @@ where
ty.super_visit_with(self) ty.super_visit_with(self)
} }
} }
fn visit_const(&mut self, c: &'tcx Const<'tcx>) -> ControlFlow<Self::BreakTy> {
self.visit_ty(c.ty)?;
let tcx = self.def_id_visitor.tcx();
if let Ok(Some(ct)) = AbstractConst::from_const(tcx, c) {
self.visit_abstract_const_expr(tcx, ct)?;
}
ControlFlow::CONTINUE
}
} }
fn min(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'_>) -> ty::Visibility { fn min(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'_>) -> ty::Visibility {

View File

@ -6,7 +6,7 @@ pub mod auto_trait;
mod chalk_fulfill; mod chalk_fulfill;
pub mod codegen; pub mod codegen;
mod coherence; mod coherence;
mod const_evaluatable; pub mod const_evaluatable;
mod engine; mod engine;
pub mod error_reporting; pub mod error_reporting;
mod fulfill; mod fulfill;

View File

@ -117,7 +117,7 @@ impl<W: Write> BufWriter<W> {
/// "successfully written" (by returning nonzero success values from /// "successfully written" (by returning nonzero success values from
/// `write`), any 0-length writes from `inner` must be reported as i/o /// `write`), any 0-length writes from `inner` must be reported as i/o
/// errors from this method. /// errors from this method.
pub(super) fn flush_buf(&mut self) -> io::Result<()> { pub(in crate::io) fn flush_buf(&mut self) -> io::Result<()> {
/// Helper struct to ensure the buffer is updated after all the writes /// Helper struct to ensure the buffer is updated after all the writes
/// are complete. It tracks the number of written bytes and drains them /// are complete. It tracks the number of written bytes and drains them
/// all from the front of the buffer when dropped. /// all from the front of the buffer when dropped.
@ -243,6 +243,18 @@ impl<W: Write> BufWriter<W> {
&self.buf &self.buf
} }
/// Returns a mutable reference to the internal buffer.
///
/// This can be used to write data directly into the buffer without triggering writes
/// to the underlying writer.
///
/// That the buffer is a `Vec` is an implementation detail.
/// Callers should not modify the capacity as there currently is no public API to do so
/// and thus any capacity changes would be unexpected by the user.
pub(in crate::io) fn buffer_mut(&mut self) -> &mut Vec<u8> {
&mut self.buf
}
/// Returns the number of bytes the internal buffer can hold without flushing. /// Returns the number of bytes the internal buffer can hold without flushing.
/// ///
/// # Examples /// # Examples

View File

@ -1,4 +1,4 @@
use crate::io::{self, ErrorKind, Read, Write}; use super::{BufWriter, ErrorKind, Read, Result, Write, DEFAULT_BUF_SIZE};
use crate::mem::MaybeUninit; use crate::mem::MaybeUninit;
/// Copies the entire contents of a reader into a writer. /// Copies the entire contents of a reader into a writer.
@ -40,7 +40,7 @@ use crate::mem::MaybeUninit;
/// } /// }
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> io::Result<u64> pub fn copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> Result<u64>
where where
R: Read, R: Read,
W: Write, W: Write,
@ -54,14 +54,82 @@ where
} }
} }
/// The general read-write-loop implementation of /// The userspace read-write-loop implementation of `io::copy` that is used when
/// `io::copy` that is used when specializations are not available or not applicable. /// OS-specific specializations for copy offloading are not available or not applicable.
pub(crate) fn generic_copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> io::Result<u64> pub(crate) fn generic_copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> Result<u64>
where where
R: Read, R: Read,
W: Write, W: Write,
{ {
let mut buf = MaybeUninit::<[u8; super::DEFAULT_BUF_SIZE]>::uninit(); BufferedCopySpec::copy_to(reader, writer)
}
/// Specialization of the read-write loop that either uses a stack buffer
/// or reuses the internal buffer of a BufWriter
trait BufferedCopySpec: Write {
fn copy_to<R: Read + ?Sized>(reader: &mut R, writer: &mut Self) -> Result<u64>;
}
impl<W: Write + ?Sized> BufferedCopySpec for W {
default fn copy_to<R: Read + ?Sized>(reader: &mut R, writer: &mut Self) -> Result<u64> {
stack_buffer_copy(reader, writer)
}
}
impl<I: Write> BufferedCopySpec for BufWriter<I> {
fn copy_to<R: Read + ?Sized>(reader: &mut R, writer: &mut Self) -> Result<u64> {
if writer.capacity() < DEFAULT_BUF_SIZE {
return stack_buffer_copy(reader, writer);
}
// FIXME: #42788
//
// - This creates a (mut) reference to a slice of
// _uninitialized_ integers, which is **undefined behavior**
//
// - Only the standard library gets to soundly "ignore" this,
// based on its privileged knowledge of unstable rustc
// internals;
unsafe {
let spare_cap = writer.buffer_mut().spare_capacity_mut();
reader.initializer().initialize(MaybeUninit::slice_assume_init_mut(spare_cap));
}
let mut len = 0;
loop {
let buf = writer.buffer_mut();
let spare_cap = buf.spare_capacity_mut();
if spare_cap.len() >= DEFAULT_BUF_SIZE {
match reader.read(unsafe { MaybeUninit::slice_assume_init_mut(spare_cap) }) {
Ok(0) => return Ok(len), // EOF reached
Ok(bytes_read) => {
assert!(bytes_read <= spare_cap.len());
// Safety: The initializer contract guarantees that either it or `read`
// will have initialized these bytes. And we just checked that the number
// of bytes is within the buffer capacity.
unsafe { buf.set_len(buf.len() + bytes_read) };
len += bytes_read as u64;
// Read again if the buffer still has enough capacity, as BufWriter itself would do
// This will occur if the reader returns short reads
continue;
}
Err(ref e) if e.kind() == ErrorKind::Interrupted => continue,
Err(e) => return Err(e),
}
}
writer.flush_buf()?;
}
}
}
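A usage sketch of the path this specialization targets (the file name, capacity, and test data are arbitrary choices, not from the PR): when the destination handed to `io::copy` is a `BufWriter` whose capacity is at least `DEFAULT_BUF_SIZE`, reads now land directly in the writer's internal buffer instead of going through a separate stack buffer first.

```rust
use std::fs::File;
use std::io::{self, BufWriter, Read, Write};

fn copy_into_file(mut src: impl Read) -> io::Result<u64> {
    // 64 KiB comfortably exceeds DEFAULT_BUF_SIZE, so the BufWriter
    // specialization (not the stack-buffer fallback) handles the copy.
    let file = File::create("out.bin")?;
    let mut dst = BufWriter::with_capacity(64 * 1024, file);
    let n = io::copy(&mut src, &mut dst)?;
    dst.flush()?; // io::copy can leave the tail of the data buffered
    Ok(n)
}

fn main() -> io::Result<()> {
    // Try it with an in-memory reader; `&[u8]` implements `Read`.
    let data = vec![0u8; 1 << 20];
    let copied = copy_into_file(data.as_slice())?;
    assert_eq!(copied, data.len() as u64);
    Ok(())
}
```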
fn stack_buffer_copy<R: Read + ?Sized, W: Write + ?Sized>(
reader: &mut R,
writer: &mut W,
) -> Result<u64> {
let mut buf = MaybeUninit::<[u8; DEFAULT_BUF_SIZE]>::uninit();
// FIXME: #42788 // FIXME: #42788
// //
// - This creates a (mut) reference to a slice of // - This creates a (mut) reference to a slice of

View File

@ -1,5 +1,8 @@
use crate::cmp::{max, min};
use crate::io::prelude::*; use crate::io::prelude::*;
use crate::io::{copy, empty, repeat, sink, Empty, Repeat, SeekFrom, Sink}; use crate::io::{
copy, empty, repeat, sink, BufWriter, Empty, Repeat, Result, SeekFrom, Sink, DEFAULT_BUF_SIZE,
};
#[test] #[test]
fn copy_copies() { fn copy_copies() {
@ -11,6 +14,51 @@ fn copy_copies() {
assert_eq!(copy(&mut r as &mut dyn Read, &mut w as &mut dyn Write).unwrap(), 1 << 17); assert_eq!(copy(&mut r as &mut dyn Read, &mut w as &mut dyn Write).unwrap(), 1 << 17);
} }
struct ShortReader {
cap: usize,
read_size: usize,
observed_buffer: usize,
}
impl Read for ShortReader {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> {
let bytes = min(self.cap, self.read_size);
self.cap -= bytes;
self.observed_buffer = max(self.observed_buffer, buf.len());
Ok(bytes)
}
}
struct WriteObserver {
observed_buffer: usize,
}
impl Write for WriteObserver {
fn write(&mut self, buf: &[u8]) -> Result<usize> {
self.observed_buffer = max(self.observed_buffer, buf.len());
Ok(buf.len())
}
fn flush(&mut self) -> Result<()> {
Ok(())
}
}
#[test]
fn copy_specializes_bufwriter() {
let cap = 117 * 1024;
let buf_sz = 16 * 1024;
let mut r = ShortReader { cap, observed_buffer: 0, read_size: 1337 };
let mut w = BufWriter::with_capacity(buf_sz, WriteObserver { observed_buffer: 0 });
assert_eq!(
copy(&mut r, &mut w).unwrap(),
cap as u64,
"expected the whole capacity to be copied"
);
assert_eq!(r.observed_buffer, buf_sz, "expected a large buffer to be provided to the reader");
assert!(w.get_mut().observed_buffer > DEFAULT_BUF_SIZE, "expected coalesced writes");
}
#[test]
fn sink_sinks() {
    let mut s = sink();
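As a runnable counterpart to `copy_specializes_bufwriter` above, here is a sketch against the public API only (outside libstd's test harness; the `ShortReader` fields and the 128 KiB figure are made up for illustration) that observes the larger read buffer the specialization provides:

use std::io::{self, BufWriter, Read, Write};

// Hands out `read_size` bytes per call until `cap` is exhausted, and records
// the largest buffer it is ever asked to fill.
struct ShortReader { cap: usize, read_size: usize, largest_buf: usize }

impl Read for ShortReader {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        self.largest_buf = self.largest_buf.max(buf.len());
        let n = self.cap.min(self.read_size).min(buf.len());
        self.cap -= n;
        Ok(n)
    }
}

fn main() -> io::Result<()> {
    let mut src = ShortReader { cap: 128 * 1024, read_size: 1337, largest_buf: 0 };
    let mut dst = BufWriter::with_capacity(16 * 1024, io::sink());
    io::copy(&mut src, &mut dst)?;
    dst.flush()?;
    // With the specialization, read() is handed the BufWriter's own 16 KiB
    // buffer; the old generic path would have offered an 8 KiB stack buffer.
    println!("largest read buffer: {} bytes", src.largest_buf);
    Ok(())
}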

View File

@ -284,6 +284,7 @@
#![feature(maybe_uninit_extra)]
#![feature(maybe_uninit_ref)]
#![feature(maybe_uninit_slice)]
#![feature(maybe_uninit_uninit_array)]
#![feature(min_specialization)]
#![feature(needs_panic_runtime)]
#![feature(negative_impls)]
@ -327,6 +328,7 @@
#![feature(unsafe_cell_raw_get)]
#![feature(unwind_attributes)]
#![feature(vec_into_raw_parts)]
#![feature(vec_spare_capacity)]
#![feature(wake_trait)]
// NB: the above list is sorted to minimize merge conflicts.
#![default_lib_allocator]
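The two added gates cover the unstable APIs the copy specialization above relies on. A rough illustration of the `Vec` spare-capacity pattern they enable (written against later Rust, where `Vec::spare_capacity_mut` has since been stabilized; this snippet is not part of the diff):

use std::mem::MaybeUninit;

fn main() {
    // Room for 8 bytes, but length 0: the tail is allocated yet uninitialized.
    let mut v: Vec<u8> = Vec::with_capacity(8);

    // The unused tail, exposed as &mut [MaybeUninit<u8>] so it can be filled
    // in place without zeroing it first.
    let spare: &mut [MaybeUninit<u8>] = v.spare_capacity_mut();
    for (i, slot) in spare.iter_mut().enumerate() {
        *slot = MaybeUninit::new(i as u8);
    }

    // Safety: every slot up to the new length was initialized in the loop above.
    unsafe { v.set_len(8) };
    assert_eq!(v, [0, 1, 2, 3, 4, 5, 6, 7]);
}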

View File

@ -74,9 +74,9 @@ macro_rules! compat_fn {
    /// used, and would remove it.
    #[used]
    #[link_section = ".CRT$XCU"]
-   static INIT_TABLE_ENTRY: fn() = init;
+   static INIT_TABLE_ENTRY: unsafe extern "C" fn() = init;

-   fn init() {
+   unsafe extern "C" fn init() {
        // There is no locking here. This code is executed before main() is entered, and
        // is guaranteed to be single-threaded.
        //
@ -84,16 +84,14 @@ macro_rules! compat_fn {
        // any Rust functions or CRT functions, if those functions touch any global state,
        // because this function runs during global initialization. For example, DO NOT
        // do any dynamic allocation, don't call LoadLibrary, etc.
-       unsafe {
-           let module_name: *const u8 = concat!($module, "\0").as_ptr();
-           let symbol_name: *const u8 = concat!(stringify!($symbol), "\0").as_ptr();
-           let module_handle = $crate::sys::c::GetModuleHandleA(module_name as *const i8);
-           if !module_handle.is_null() {
-               match $crate::sys::c::GetProcAddress(module_handle, symbol_name as *const i8) as usize {
-                   0 => {}
-                   n => {
-                       PTR = Some(mem::transmute::<usize, F>(n));
-                   }
-               }
-           }
-       }
+       let module_name: *const u8 = concat!($module, "\0").as_ptr();
+       let symbol_name: *const u8 = concat!(stringify!($symbol), "\0").as_ptr();
+       let module_handle = $crate::sys::c::GetModuleHandleA(module_name as *const i8);
+       if !module_handle.is_null() {
+           match $crate::sys::c::GetProcAddress(module_handle, symbol_name as *const i8) as usize {
+               0 => {}
+               n => {
+                   PTR = Some(mem::transmute::<usize, F>(n));
+               }
+           }
+       }
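For the calling-convention change itself: `.CRT$XCU` is a table of initializer pointers that the Microsoft CRT walks before `main`, calling each entry through the C ABI, so the registered function has to be `unsafe extern "C" fn()`. A stripped-down, hypothetical sketch of the same registration pattern outside the `compat_fn!` macro:

// Windows/MSVC-only sketch of registering a CRT initializer by hand.
#[cfg(all(windows, target_env = "msvc"))]
#[allow(dead_code)]
mod crt_init {
    // Called by the CRT before main(); it must not panic, allocate, or rely
    // on any Rust runtime state, because nothing else is initialized yet.
    unsafe extern "C" fn init() {
        // e.g. look up an optional API with GetProcAddress and stash the pointer
    }

    // #[used] keeps the entry alive even though nothing references it, and the
    // link_section places the pointer into the CRT's initializer table.
    #[used]
    #[link_section = ".CRT$XCU"]
    static INIT_TABLE_ENTRY: unsafe extern "C" fn() = init;
}

fn main() {}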

View File

@ -1050,8 +1050,10 @@ impl Step for Assemble {
        builder.copy(&lld_install.join("bin").join(&src_exe), &libdir_bin.join(&dst_exe));
    }

-   // Similarly, copy `llvm-dwp` into libdir for Split DWARF.
-   {
+   // Similarly, copy `llvm-dwp` into libdir for Split DWARF. Only copy it when the LLVM
+   // backend is used to avoid unnecessarily building LLVM and because LLVM is not checked
+   // out by default when the LLVM backend is not enabled.
+   if builder.config.rust_codegen_backends.contains(&INTERNER.intern_str("llvm")) {
        let src_exe = exe("llvm-dwp", target_compiler.host);
        let dst_exe = exe("rust-llvm-dwp", target_compiler.host);
        let llvm_config_bin = builder.ensure(native::Llvm { target: target_compiler.host });

View File

@ -1695,15 +1695,6 @@ function defocusSearchBar() {
        search.innerHTML = output;
        showSearchResults(search);

-       var tds = search.getElementsByTagName("td");
-       var td_width = 0;
-       if (tds.length > 0) {
-           td_width = tds[0].offsetWidth;
-       }
-       var width = search.offsetWidth - 40 - td_width;
-       onEachLazy(search.getElementsByClassName("desc"), function(e) {
-           e.style.width = width + "px";
-       });
        initSearchNav();
        var elems = document.getElementById("titles").childNodes;
        elems[0].onclick = function() { printTab(0); };

View File

@ -1425,7 +1425,7 @@ h4 > .notable-traits {
    margin-left: 0px;
}

-#main {
+#main, #search {
    margin-top: 45px;
    padding: 0;
}
@ -1579,7 +1579,7 @@ h4 > .notable-traits {
    }
}

-@media (max-width: 416px) {
+@media (max-width: 464px) {
    #titles, #titles > button {
        height: 73px;
    }

View File

@ -0,0 +1,31 @@
#![crate_type = "lib"]
#![feature(const_generics, const_evaluatable_checked)]
#![allow(incomplete_features)]
pub struct Const<const U: u8>;
pub trait Trait {
type AssocTy;
fn assoc_fn() -> Self::AssocTy;
}
impl<const U: u8> Trait for Const<U>
//~^ WARN private type
//~| WARN this was previously
//~| WARN private type
//~| WARN this was previously
where
Const<{ my_const_fn(U) }>: ,
{
type AssocTy = Const<{ my_const_fn(U) }>;
//~^ ERROR private type
fn assoc_fn() -> Self::AssocTy {
Const
}
}
const fn my_const_fn(val: u8) -> u8 {
// body of this function doesn't matter
val
}

View File

@ -0,0 +1,43 @@
warning: private type `fn(u8) -> u8 {my_const_fn}` in public interface (error E0446)
--> $DIR/eval-privacy.rs:12:1
|
LL | / impl<const U: u8> Trait for Const<U>
LL | |
LL | |
LL | |
... |
LL | | }
LL | | }
| |_^
|
= note: `#[warn(private_in_public)]` on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
warning: private type `fn(u8) -> u8 {my_const_fn}` in public interface (error E0446)
--> $DIR/eval-privacy.rs:12:1
|
LL | / impl<const U: u8> Trait for Const<U>
LL | |
LL | |
LL | |
... |
LL | | }
LL | | }
| |_^
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #34537 <https://github.com/rust-lang/rust/issues/34537>
error[E0446]: private type `fn(u8) -> u8 {my_const_fn}` in public interface
--> $DIR/eval-privacy.rs:21:5
|
LL | type AssocTy = Const<{ my_const_fn(U) }>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ can't leak private type
...
LL | const fn my_const_fn(val: u8) -> u8 {
| ----------------------------------- `fn(u8) -> u8 {my_const_fn}` declared as private
error: aborting due to previous error; 2 warnings emitted
For more information about this error, try `rustc --explain E0446`.

View File

@ -2,7 +2,7 @@ warning: type `𝕟𝕠𝕥𝕒𝕔𝕒𝕞𝕖𝕝` should have an upper camel
  --> $DIR/special-upper-lower-cases.rs:11:8
   |
LL | struct 𝕟𝕠𝕥𝕒𝕔𝕒𝕞𝕖𝕝;
-  | ^^^^^^^^^
+  | ^^^^^^^^^ should have an UpperCamelCase name
   |
   = note: `#[warn(non_camel_case_types)]` on by default
@ -10,13 +10,13 @@ warning: type `𝕟𝕠𝕥_𝕒_𝕔𝕒𝕞𝕖𝕝` should have an upper came
  --> $DIR/special-upper-lower-cases.rs:15:8
   |
LL | struct 𝕟𝕠𝕥_𝕒_𝕔𝕒𝕞𝕖𝕝;
-  | ^^^^^^^^^^^ help: convert the identifier to upper camel case: 𝕟𝕠𝕥𝕒𝕔𝕒𝕞𝕖𝕝
+  | ^^^^^^^^^^^ should have an UpperCamelCase name

warning: static variable `𝗻𝗼𝗻𝘂𝗽𝗽𝗲𝗿𝗰𝗮𝘀𝗲` should have an upper case name
  --> $DIR/special-upper-lower-cases.rs:18:8
   |
LL | static 𝗻𝗼𝗻𝘂𝗽𝗽𝗲𝗿𝗰𝗮𝘀𝗲: i32 = 1;
-  | ^^^^^^^^^^^^
+  | ^^^^^^^^^^^^ should have an UPPER_CASE name
   |
   = note: `#[warn(non_upper_case_globals)]` on by default
@ -24,7 +24,7 @@ warning: variable `𝓢𝓝𝓐𝓐𝓐𝓐𝓚𝓔𝓢` should have a snake cas
  --> $DIR/special-upper-lower-cases.rs:22:9
   |
LL | let 𝓢𝓝𝓐𝓐𝓐𝓐𝓚𝓔𝓢 = 1;
-  | ^^^^^^^^^
+  | ^^^^^^^^^ should have a snake_case name
   |
   = note: `#[warn(non_snake_case)]` on by default

Some files were not shown because too many files have changed in this diff.