resolved conflict with upstream commit

Brad Gibson 2018-05-06 07:56:53 -07:00
commit 6a78c0a10f
554 changed files with 10878 additions and 4869 deletions


@ -346,6 +346,9 @@
# Whether to deny warnings in crates
#deny-warnings = true
# Print backtrace on internal compiler errors during bootstrap
#backtrace-on-ice = false
# =============================================================================
# Options for specific targets
#

src/Cargo.lock

@ -390,10 +390,12 @@ dependencies = [
"env_logger 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
"filetime 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
"rustfix 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
@ -996,9 +998,10 @@ dependencies = [
[[package]]
name = "languageserver-types"
version = "0.36.0"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1325,16 +1328,16 @@ dependencies = [
[[package]]
name = "parking_lot"
version = "0.5.4"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parking_lot_core"
version = "0.2.13"
version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1623,7 +1626,7 @@ dependencies = [
[[package]]
name = "rls"
version = "0.126.0"
version = "0.127.0"
dependencies = [
"cargo 0.28.0",
"cargo_metadata 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1632,7 +1635,7 @@ dependencies = [
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"languageserver-types 0.36.0 (registry+https://github.com/rust-lang/crates.io-index)",
"languageserver-types 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1644,7 +1647,7 @@ dependencies = [
"rls-rustc 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rls-vfs 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustfmt-nightly 0.6.0",
"rustfmt-nightly 0.6.1",
"serde 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1728,7 +1731,6 @@ dependencies = [
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"proc_macro 0.0.0",
"rustc_apfloat 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
@ -1740,70 +1742,84 @@ dependencies = [
[[package]]
name = "rustc-ap-rustc_cratesio_shim"
version = "110.0.0"
version = "113.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-rustc_data_structures"
version = "110.0.0"
version = "113.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_cratesio_shim 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-rustc_errors"
version = "110.0.0"
version = "113.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"atty 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_data_structures 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax_pos 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_data_structures 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax_pos 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-serialize"
version = "110.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rustc-ap-syntax"
version = "110.0.0"
name = "rustc-ap-rustc_target"
version = "113.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_cratesio_shim 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_data_structures 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_errors 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax_pos 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_cratesio_shim 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-serialize"
version = "113.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rustc-ap-syntax"
version = "113.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_data_structures 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_errors 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_target 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax_pos 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustc-ap-syntax_pos"
version = "110.0.0"
version = "113.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rustc-ap-rustc_data_structures 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_data_structures 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-serialize 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1872,15 +1888,6 @@ dependencies = [
"syntax_pos 0.0.0",
]
[[package]]
name = "rustc_const_math"
version = "0.0.0"
dependencies = [
"rustc_apfloat 0.0.0",
"serialize 0.0.0",
"syntax 0.0.0",
]
[[package]]
name = "rustc_cratesio_shim"
version = "0.0.0"
@ -1896,8 +1903,8 @@ dependencies = [
"cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_cratesio_shim 0.0.0",
"serialize 0.0.0",
"stable_deref_trait 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2026,7 +2033,6 @@ dependencies = [
"log_settings 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_apfloat 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
@ -2053,7 +2059,6 @@ version = "0.0.0"
dependencies = [
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_mir 0.0.0",
@ -2154,7 +2159,6 @@ dependencies = [
"rustc-demangle 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_allocator 0.0.0",
"rustc_apfloat 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_incremental 0.0.0",
@ -2205,7 +2209,6 @@ dependencies = [
"fmt_macros 0.0.0",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_const_math 0.0.0",
"rustc_data_structures 0.0.0",
"rustc_errors 0.0.0",
"rustc_platform_intrinsics 0.0.0",
@ -2233,22 +2236,35 @@ dependencies = [
"rustdoc 0.0.0",
]
[[package]]
name = "rustfix"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quick-error 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rustfmt-nightly"
version = "0.6.0"
version = "0.6.1"
dependencies = [
"assert_cli 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"cargo_metadata 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
"derive-new 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.5.8 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-rustc_target 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-ap-syntax 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3063,7 +3079,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum json 0.11.13 (registry+https://github.com/rust-lang/crates.io-index)" = "9ad0485404155f45cce53a40d4b2d6ac356418300daed05273d9e26f91c390be"
"checksum jsonrpc-core 8.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ddf83704f4e79979a424d1082dd2c1e52683058056c9280efa19ac5f6bc9033c"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum languageserver-types 0.36.0 (registry+https://github.com/rust-lang/crates.io-index)" = "174cdfb8bed13225bb419bec66ee1c970099c875688645f9c4a82e3af43ba69d"
"checksum languageserver-types 0.39.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ad4cdd5e52d71aca47050e5b25f03082609c63a1e76b7362ebdd010895b3f854"
"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
"checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d"
"checksum lazycell 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a6f08839bc70ef4a3fe1d566d5350f519c5912ea86be0df1740a7d247c7fc0ef"
@ -3096,8 +3112,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum openssl-sys 0.9.28 (registry+https://github.com/rust-lang/crates.io-index)" = "0bbd90640b148b46305c1691eed6039b5c8509bed16991e3562a01eeb76902a3"
"checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063"
"checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37"
"checksum parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)" = "9fd9d732f2de194336fb02fe11f9eed13d9e76f13f4315b4d88a14ca411750cd"
"checksum parking_lot_core 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "538ef00b7317875071d5e00f603f24d16f0b474c1a5fc0ccb8b454ca72eafa79"
"checksum parking_lot 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "d4d05f1349491390b1730afba60bb20d55761bef489a954546b58b4b34e1e2ac"
"checksum parking_lot_core 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "4db1a8ccf734a7bce794cc19b3df06ed87ab2f3907036b693c68f56b4d4537fa"
"checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
"checksum pest 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0fce5d8b5cc33983fc74f78ad552b5522ab41442c4ca91606e4236eb4b5ceefc"
"checksum pest_derive 1.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ab94faafeb93f4c5e3ce81ca0e5a779529a602ad5d09ae6d21996bfb8b6a52bf"
@ -3134,14 +3150,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum rls-rustc 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "885f66b92757420572cbb02e033d4a9558c7413ca9b7ac206f28fd58ffdb44ea"
"checksum rls-span 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d7c7046dc6a92f2ae02ed302746db4382e75131b9ce20ce967259f6b5867a6a"
"checksum rls-vfs 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "be231e1e559c315bc60ced5ad2cc2d7a9c208ed7d4e2c126500149836fda19bb"
"checksum rustc-ap-rustc_cratesio_shim 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0256e318ad99c467d24bd7188f2d4a3028360621bb92d769b4b65fc44717d514"
"checksum rustc-ap-rustc_data_structures 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "83430df7f76ea85c1f70fe145041576eee8fd5d77053bf426df24b480918d185"
"checksum rustc-ap-rustc_errors 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2b03f874277103039816f6467b1ff30a81b1d6a29d4de6efccefe4c488f6535a"
"checksum rustc-ap-serialize 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2e47cf949f06b0c7ab7566c2f69d49f28cb3ecf1bb8bf0bda48b1ba5b7945ae"
"checksum rustc-ap-syntax 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "625e6fb41fde299082cda3bceb08f81c9ba56b14a2ec737b4366f9c3c9be07d8"
"checksum rustc-ap-syntax_pos 110.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "474a23ef1a1245ae02c5fd6a1e9a0725ce6fd25ca2294703c03bddce041f867b"
"checksum rustc-ap-rustc_cratesio_shim 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a01334797c5c4cf56cc40bb9636d7b4c4a076665b9b9b7f100fd666cf0a02ffc"
"checksum rustc-ap-rustc_data_structures 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "03d6f8f7da0de905f6ef80dc14dce3bbc372430622b6aeb421cf13190bc70e8a"
"checksum rustc-ap-rustc_errors 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3dfd6183804a685c48601651d8c8c7b0daa8f83b0b5e24edfbcb6a0337085127"
"checksum rustc-ap-rustc_target 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f223157f51bf0e0621bef099de862468892ee4c4b83056f48f63e1bc00ccb72"
"checksum rustc-ap-serialize 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2104a55a87d65cba8a845656f1f19a35da52af403863cd2a4bd5876ba522d879"
"checksum rustc-ap-syntax 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b50671adb9b0a7c57a4690ac6a40cb614879f543b64aada42f55b66212492323"
"checksum rustc-ap-syntax_pos 113.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "55793c2a775230c42661194c48d44b35d4c8439d79ad8528e56651e854c48c63"
"checksum rustc-demangle 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "11fb43a206a04116ffd7cfcf9bcb941f8eb6cc7ff667272246b0a1c74259a3cb"
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
"checksum rustfix 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "165a212dd11124d7070892da20f71d82970ef1d1dd41cd804b70f39740a21c85"
"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
"checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637"
"checksum schannel 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "85fd9df495640643ad2d00443b3d78aae69802ad488debab4f1dd52fc1806ade"


@ -40,14 +40,6 @@ members = [
"tools/rls/test_data/workspace_symbol",
]
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit
# MSVC when running the compile-fail test suite when a should-fail test panics.
# But hey if this is removed and it gets past the bots, sounds good to me.
[profile.release]
opt-level = 2
[profile.bench]
opt-level = 2
# These options are controlled from our rustc wrapper script, so turn them off
# here and have them controlled elsewhere.
[profile.dev]


@ -64,6 +64,10 @@ The script accepts commands, flags, and arguments to determine what to do:
# execute tests in the standard library in stage0
./x.py test --stage 0 src/libstd
# execute tests in the core and standard library in stage0,
# without running doc tests (thus avoiding the need to build the compiler)
./x.py test --stage 0 --no-doc src/libcore src/libstd
# execute all doc tests
./x.py test src/doc
```


@ -107,6 +107,13 @@ fn main() {
env::join_paths(&dylib_path).unwrap());
let mut maybe_crate = None;
// Print backtrace in case of ICE
if env::var("RUSTC_BACKTRACE_ON_ICE").is_ok() && env::var("RUST_BACKTRACE").is_err() {
cmd.env("RUST_BACKTRACE", "1");
}
cmd.env("RUSTC_BREAK_ON_ICE", "1");
if let Some(target) = target {
// The stage0 compiler has a special sysroot distinct from what we
// actually downloaded, so we just always pass the `--sysroot` option.


@ -25,7 +25,7 @@ use compile;
use install;
use dist;
use util::{exe, libdir, add_lib_path};
use {Build, Mode};
use {Build, Mode, DocTests};
use cache::{INTERNER, Interned, Cache};
use check;
use test;
@ -323,7 +323,7 @@ impl<'a> Builder<'a> {
test::Cargotest, test::Cargo, test::Rls, test::ErrorIndex, test::Distcheck,
test::RunMakeFullDeps,
test::Nomicon, test::Reference, test::RustdocBook, test::RustByExample,
test::TheBook, test::UnstableBook,
test::TheBook, test::UnstableBook, test::RustcBook,
test::Rustfmt, test::Miri, test::Clippy, test::RustdocJS, test::RustdocTheme,
// Run run-make last, since these won't pass without make on Windows
test::RunMake, test::RustdocUi),
@ -331,7 +331,7 @@ impl<'a> Builder<'a> {
Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
doc::Standalone, doc::Std, doc::Test, doc::WhitelistedRustc, doc::Rustc,
doc::ErrorIndex, doc::Nomicon, doc::Reference, doc::Rustdoc, doc::RustByExample,
doc::CargoBook),
doc::RustcBook, doc::CargoBook),
Kind::Dist => describe!(dist::Docs, dist::RustcDocs, dist::Mingw, dist::Rustc,
dist::DebuggerScripts, dist::Std, dist::Analysis, dist::Src,
dist::PlainSourceTarball, dist::Cargo, dist::Rls, dist::Rustfmt, dist::Extended,
@ -591,6 +591,8 @@ impl<'a> Builder<'a> {
format!("{} {}", env::var("RUSTFLAGS").unwrap_or_default(), extra_args));
}
let want_rustdoc = self.doc_tests != DocTests::No;
// Customize the compiler we're running. Specify the compiler to cargo
// as our shim and then pass it some various options used to configure
// how the actual compiler itself is called.
@ -607,7 +609,7 @@ impl<'a> Builder<'a> {
.env("RUSTC_LIBDIR", self.rustc_libdir(compiler))
.env("RUSTC_RPATH", self.config.rust_rpath.to_string())
.env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc"))
.env("RUSTDOC_REAL", if cmd == "doc" || cmd == "test" {
.env("RUSTDOC_REAL", if cmd == "doc" || (cmd == "test" && want_rustdoc) {
self.rustdoc(compiler.host)
} else {
PathBuf::from("/path/to/nowhere/rustdoc/not/required")
@ -624,7 +626,7 @@ impl<'a> Builder<'a> {
if let Some(ref error_format) = self.config.rustc_error_format {
cargo.env("RUSTC_ERROR_FORMAT", error_format);
}
if cmd != "build" && cmd != "check" {
if cmd != "build" && cmd != "check" && want_rustdoc {
cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.config.build)));
}
@ -706,6 +708,10 @@ impl<'a> Builder<'a> {
cargo.env("RUSTC_PRINT_STEP_TIMINGS", "1");
}
if self.config.backtrace_on_ice {
cargo.env("RUSTC_BACKTRACE_ON_ICE", "1");
}
cargo.env("RUSTC_VERBOSE", format!("{}", self.verbosity));
// in std, we want to avoid denying warnings for stage 0 as that makes cfg's painful.
@ -1403,4 +1409,39 @@ mod __test {
},
]);
}
#[test]
fn test_with_no_doc_stage0() {
let mut config = configure(&[], &[]);
config.stage = Some(0);
config.cmd = Subcommand::Test {
paths: vec!["src/libstd".into()],
test_args: vec![],
rustc_args: vec![],
fail_fast: true,
doc_tests: DocTests::No,
};
let build = Build::new(config);
let mut builder = Builder::new(&build);
let host = INTERNER.intern_str("A");
builder.run_step_descriptions(
&[StepDescription::from::<test::Crate>()],
&["src/libstd".into()],
);
// Ensure we don't build any compiler artifacts.
assert!(builder.cache.all::<compile::Rustc>().is_empty());
assert_eq!(first(builder.cache.all::<test::Crate>()), &[
test::Crate {
compiler: Compiler { host, stage: 0 },
target: host,
mode: Mode::Libstd,
test_kind: test::TestKind::Test,
krate: INTERNER.intern_str("std"),
},
]);
}
}


@ -72,6 +72,7 @@ pub struct Config {
pub dry_run: bool,
pub deny_warnings: bool,
pub backtrace_on_ice: bool,
// llvm codegen options
pub llvm_enabled: bool,
@ -306,6 +307,7 @@ struct Rust {
wasm_syscall: Option<bool>,
lld: Option<bool>,
deny_warnings: Option<bool>,
backtrace_on_ice: Option<bool>,
}
/// TOML representation of how each build target is configured.
@ -325,6 +327,14 @@ struct TomlTarget {
}
impl Config {
fn path_from_python(var_key: &str) -> PathBuf {
match env::var_os(var_key) {
// Do not trust paths from Python and normalize them slightly (#49785).
Some(var_val) => Path::new(&var_val).components().collect(),
_ => panic!("expected '{}' to be set", var_key),
}
}
pub fn default_opts() -> Config {
let mut config = Config::default();
config.llvm_enabled = true;
@ -348,9 +358,9 @@ impl Config {
config.deny_warnings = true;
// set by bootstrap.py
config.src = env::var_os("SRC").map(PathBuf::from).expect("'SRC' to be set");
config.build = INTERNER.intern_str(&env::var("BUILD").expect("'BUILD' to be set"));
config.out = env::var_os("BUILD_DIR").map(PathBuf::from).expect("'BUILD_DIR' set");
config.src = Config::path_from_python("SRC");
config.out = Config::path_from_python("BUILD_DIR");
let stage0_root = config.out.join(&config.build).join("stage0/bin");
config.initial_rustc = stage0_root.join(exe("rustc", &config.build));
@ -523,6 +533,7 @@ impl Config {
config.musl_root = rust.musl_root.clone().map(PathBuf::from);
config.save_toolstates = rust.save_toolstates.clone().map(PathBuf::from);
set(&mut config.deny_warnings, rust.deny_warnings.or(flags.warnings));
set(&mut config.backtrace_on_ice, rust.backtrace_on_ice);
if let Some(ref backends) = rust.codegen_backends {
config.rust_codegen_backends = backends.iter()


@ -120,6 +120,8 @@ v("musl-root-arm", "target.arm-unknown-linux-musleabi.musl-root",
"arm-unknown-linux-musleabi install directory")
v("musl-root-armhf", "target.arm-unknown-linux-musleabihf.musl-root",
"arm-unknown-linux-musleabihf install directory")
v("musl-root-armv5te", "target.armv5te-unknown-linux-musleabi.musl-root",
"armv5te-unknown-linux-musleabi install directory")
v("musl-root-armv7", "target.armv7-unknown-linux-musleabihf.musl-root",
"armv7-unknown-linux-musleabihf install directory")
v("musl-root-aarch64", "target.aarch64-unknown-linux-musl.musl-root",


@ -71,6 +71,7 @@ book!(
Nomicon, "src/doc/nomicon", "nomicon";
Reference, "src/doc/reference", "reference";
Rustdoc, "src/doc/rustdoc", "rustdoc";
RustcBook, "src/doc/rustc", "rustc";
RustByExample, "src/doc/rust-by-example", "rust-by-example";
);


@ -19,7 +19,7 @@ use std::process;
use getopts::Options;
use Build;
use {Build, DocTests};
use config::Config;
use metadata;
use builder::Builder;
@ -62,7 +62,7 @@ pub enum Subcommand {
test_args: Vec<String>,
rustc_args: Vec<String>,
fail_fast: bool,
doc_tests: bool,
doc_tests: DocTests,
},
Bench {
paths: Vec<PathBuf>,
@ -171,7 +171,8 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`");
"extra options to pass the compiler when running tests",
"ARGS",
);
opts.optflag("", "doc", "run doc tests");
opts.optflag("", "no-doc", "do not run doc tests");
opts.optflag("", "doc", "only run doc tests");
},
"bench" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
"clean" => { opts.optflag("", "all", "clean all build artifacts"); },
@ -324,7 +325,13 @@ Arguments:
test_args: matches.opt_strs("test-args"),
rustc_args: matches.opt_strs("rustc-args"),
fail_fast: !matches.opt_present("no-fail-fast"),
doc_tests: matches.opt_present("doc"),
doc_tests: if matches.opt_present("doc") {
DocTests::Only
} else if matches.opt_present("no-doc") {
DocTests::No
} else {
DocTests::Yes
}
}
}
"bench" => {
@ -411,10 +418,10 @@ impl Subcommand {
}
}
pub fn doc_tests(&self) -> bool {
pub fn doc_tests(&self) -> DocTests {
match *self {
Subcommand::Test { doc_tests, .. } => doc_tests,
_ => false,
_ => DocTests::Yes,
}
}
}


@ -122,12 +122,10 @@ struct JOBOBJECT_BASIC_LIMIT_INFORMATION {
}
pub unsafe fn setup(build: &mut Build) {
// Tell Windows to not show any UI on errors (such as not finding a required dll
// during startup or terminating abnormally). This is important for running tests,
// since some of them use abnormal termination by design.
// This mode is inherited by all child processes.
let mode = SetErrorMode(SEM_NOGPFAULTERRORBOX); // read inherited flags
SetErrorMode(mode | SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);
// Enable the Windows Error Reporting dialog which msys disables,
// so we can JIT debug rustc
let mode = SetErrorMode(0);
SetErrorMode(mode & !SEM_NOGPFAULTERRORBOX);
// Create a new job object for us to use
let job = CreateJobObjectW(0 as *mut _, 0 as *const _);


@ -210,6 +210,16 @@ pub struct Compiler {
host: Interned<String>,
}
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
pub enum DocTests {
// Default, run normal tests and doc tests.
Yes,
// Do not run any doc tests.
No,
// Only run doc tests.
Only,
}
/// Global configuration for the build system.
///
/// This structure transitively contains all configuration for the build system.
@ -233,7 +243,7 @@ pub struct Build {
rustfmt_info: channel::GitInfo,
local_rebuild: bool,
fail_fast: bool,
doc_tests: bool,
doc_tests: DocTests,
verbosity: usize,
// Targets for which to build.
@ -294,7 +304,7 @@ impl Crate {
///
/// These entries currently correspond to the various output directories of the
/// build system, with each mod generating output in a different directory.
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Hash, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
pub enum Mode {
/// Build the standard library, placing output in the "stageN-std" directory.
Libstd,


@ -32,13 +32,13 @@ use dist;
use native;
use tool::{self, Tool};
use util::{self, dylib_path, dylib_path_var};
use Mode;
use {Mode, DocTests};
use toolstate::ToolState;
const ADB_TEST_DIR: &str = "/data/tmp/work";
/// The two modes of the test runner; tests or benchmarks.
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)]
pub enum TestKind {
/// Run `cargo test`
Test,
@ -1212,6 +1212,7 @@ test_book!(
Nomicon, "src/doc/nomicon", "nomicon", default=false;
Reference, "src/doc/reference", "reference", default=false;
RustdocBook, "src/doc/rustdoc", "rustdoc", default=true;
RustcBook, "src/doc/rustc", "rustc", default=true;
RustByExample, "src/doc/rust-by-example", "rust-by-example", default=false;
TheBook, "src/doc/book", "book", default=false;
UnstableBook, "src/doc/unstable-book", "unstable-book", default=true;
@ -1406,13 +1407,13 @@ impl Step for CrateNotDefault {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Crate {
compiler: Compiler,
target: Interned<String>,
mode: Mode,
test_kind: TestKind,
krate: Interned<String>,
pub compiler: Compiler,
pub target: Interned<String>,
pub mode: Mode,
pub test_kind: TestKind,
pub krate: Interned<String>,
}
impl Step for Crate {
@ -1518,8 +1519,14 @@ impl Step for Crate {
if test_kind.subcommand() == "test" && !builder.fail_fast {
cargo.arg("--no-fail-fast");
}
if builder.doc_tests {
cargo.arg("--doc");
match builder.doc_tests {
DocTests::Only => {
cargo.arg("--doc");
}
DocTests::No => {
cargo.args(&["--lib", "--bins", "--examples", "--tests", "--benches"]);
}
DocTests::Yes => {}
}
cargo.arg("-p").arg(krate);


@ -43,6 +43,10 @@ ENV STAGING_DIR=/tmp
COPY scripts/musl.sh /build
RUN env \
CC=arm-linux-gnueabi-gcc CFLAGS="-march=armv5te -marm -mfloat-abi=soft" \
CXX=arm-linux-gnueabi-g++ CXXFLAGS="-march=armv5te -marm -mfloat-abi=soft" \
bash musl.sh armv5te && \
env \
CC=arm-linux-gnueabi-gcc CFLAGS="-march=armv6 -marm" \
CXX=arm-linux-gnueabi-g++ CXXFLAGS="-march=armv6 -marm" \
bash musl.sh arm && \
@ -84,6 +88,7 @@ ENV TARGETS=$TARGETS,mipsel-unknown-linux-musl
ENV TARGETS=$TARGETS,arm-unknown-linux-musleabi
ENV TARGETS=$TARGETS,arm-unknown-linux-musleabihf
ENV TARGETS=$TARGETS,armv5te-unknown-linux-gnueabi
ENV TARGETS=$TARGETS,armv5te-unknown-linux-musleabi
ENV TARGETS=$TARGETS,armv7-unknown-linux-musleabihf
ENV TARGETS=$TARGETS,aarch64-unknown-linux-musl
ENV TARGETS=$TARGETS,sparc64-unknown-linux-gnu
@ -100,9 +105,12 @@ ENV CC_mipsel_unknown_linux_musl=mipsel-openwrt-linux-gcc \
CC_sparc64_unknown_linux_gnu=sparc64-linux-gnu-gcc \
CC_x86_64_unknown_redox=x86_64-unknown-redox-gcc \
CC_armv5te_unknown_linux_gnueabi=arm-linux-gnueabi-gcc \
CFLAGS_armv5te_unknown_linux_gnueabi="-march=armv5te -marm -mfloat-abi=soft"
CFLAGS_armv5te_unknown_linux_gnueabi="-march=armv5te -marm -mfloat-abi=soft" \
CC_armv5te_unknown_linux_musleabi=arm-linux-gnueabi-gcc \
CFLAGS_armv5te_unknown_linux_musleabi="-march=armv5te -marm -mfloat-abi=soft"
ENV RUST_CONFIGURE_ARGS \
--musl-root-armv5te=/musl-armv5te \
--musl-root-arm=/musl-arm \
--musl-root-armhf=/musl-armhf \
--musl-root-armv7=/musl-armv7 \


@ -19,4 +19,5 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1
ENV SCRIPT python2.7 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu


@ -16,7 +16,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV PARALLEL_CHECK 1
ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1
ENV RUST_CONFIGURE_ARGS \
--build=x86_64-unknown-linux-gnu \
--enable-debug \


@ -78,9 +78,9 @@ fi
# sccache server at the start of the build, but no need to worry if this fails.
SCCACHE_IDLE_TIMEOUT=10800 sccache --start-server || true
if [ "$PARALLEL_CHECK" != "" ]; then
if [ "$RUN_CHECK_WITH_PARALLEL_QUERIES" != "" ]; then
$SRC/configure --enable-experimental-parallel-queries
python2.7 ../x.py check
CARGO_INCREMENTAL=0 python2.7 ../x.py check
rm -f config.toml
rm -rf build
fi

@ -1 +1 @@
Subproject commit b889e1e30c5e9953834aa9fa6c982bb28df46ac9
Subproject commit f51127530d46b9acbf4747c859da185e771cfcf3


@ -43,6 +43,10 @@ Rust's standard library has [extensive API documentation](std/index.html),
with explanations of how to use various things, as well as example code for
accomplishing various tasks.
## The Rustc Book
[The Rustc Book](rustc/index.html) describes the Rust compiler, `rustc`.
## The Cargo Book
[The Cargo Book](cargo/index.html) is a guide to Cargo, Rust's build tool and dependency manager.

@ -1 +1 @@
Subproject commit 3c56329d1bd9038e5341f1962bcd8d043312a712
Subproject commit 748a5e6742db4a21c4c630a58087f818828e8a0a

@ -1 +1 @@
Subproject commit 76296346e97c3702974d3398fdb94af9e10111a2
Subproject commit 134f419ee62714590b04712fe6072253bc2a7822

@ -1 +1 @@
Subproject commit d5ec87eabe5733cc2348c7dada89fc67c086f391
Subproject commit eebda16e4b45f2eed4310cf7b9872cc752278163

src/doc/rustc/.gitignore

@ -0,0 +1 @@
book

src/doc/rustc/book.toml

@ -0,0 +1,5 @@
[book]
authors = ["The Rust Project Developers"]
multilingual = false
src = "src"
title = "The rustc book"


@ -0,0 +1,16 @@
# The Rustc Book
- [What is rustc?](what-is-rustc.md)
- [Command-line arguments](command-line-arguments.md)
- [Lints](lints/index.md)
- [Lint levels](lints/levels.md)
- [Lint Groups](lints/groups.md)
- [Lint listing](lints/listing/index.md)
- [Allowed-by-default lints](lints/listing/allowed-by-default.md)
- [Warn-by-default lints](lints/listing/warn-by-default.md)
- [Deny-by-default lints](lints/listing/deny-by-default.md)
- [Codegen options](codegen-options/index.md)
- [Targets](targets/index.md)
- [Built-in Targets](targets/built-in.md)
- [Custom Targets](targets/custom.md)
- [Contributing to `rustc`](contributing.md)


@ -0,0 +1,209 @@
# Codegen options
All of these options are passed to `rustc` via the `-C` flag, short for "codegen." You can see
a version of this list for your exact compiler by running `rustc -C help`.
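For example, several of the options described below can be combined in a single
invocation; a sketch might look like this (the `main.rs` file name is only a
placeholder):

```bash
$ rustc -C opt-level=3 -C debuginfo=1 main.rs
```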
## ar
This option is deprecated and does nothing.
## linker
This flag lets you control which linker `rustc` invokes to link your code.
## link-arg=val
This flag lets you append a single extra argument to the linker invocation.
"Append" is significant; you can pass this flag multiple times to add multiple arguments.
## link-args
This flag lets you append multiple extra arguments to the linker invocation. The
options should be separated by spaces.
## link-dead-code
Normally, the linker will remove dead code. This flag disables this behavior.
An example of when this flag might be useful is when trying to construct code coverage
metrics.
## lto
This flag instructs LLVM to use [link time
optimizations](https://llvm.org/docs/LinkTimeOptimization.html).
It takes one of two values, `thin` or `fat`. 'thin' LTO [is a newer feature of
LLVM](http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html),
while 'fat' refers to the classic version of LTO.
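Either variant can be requested directly on the command line, for instance (the
file name is a placeholder):

```bash
$ rustc -C lto=thin main.rs
$ rustc -C lto=fat main.rs
```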
## target-cpu
This instructs `rustc` to generate code specifically for a particular processor.
You can run `rustc --print target-cpus` to see the valid options to pass
here. Additionally, `native` can be passed to use the processor of the host
machine.
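For example, you might first list the accepted values and then tune for the
machine doing the build (the file name is a placeholder):

```bash
$ rustc --print target-cpus
$ rustc -C target-cpu=native main.rs
```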
## target-feature
Individual targets will support different features; this flag lets you control
enabling or disabling a feature.
To see the valid options and an example of use, run `rustc --print
target-features`.
## passes
This flag can be used to add extra LLVM passes to the compilation.
The list must be separated by spaces.
## llvm-args
This flag can be used to pass a list of arguments directly to LLVM.
The list must be separated by spaces.
## save-temps
`rustc` will generate temporary files during compilation; normally it will
delete them after it's done with its work. This option will cause them to be
preserved instead of removed.
## rpath
This option allows you to set the value of
[`rpath`](https://en.wikipedia.org/wiki/Rpath).
## overflow-checks
This flag allows you to control the behavior of integer overflow. It accepts
one of the following values:
* To turn overflow checks on: `y`, `yes`, or `on`.
* To turn overflow checks off: `n`, `no`, or `off`.
## no-prepopulate-passes
The pass manager comes pre-populated with a list of passes; this flag
ensures that list is empty.
## no-vectorize-loops
By default, `rustc` will attempt to [vectorize
loops](https://llvm.org/docs/Vectorizers.html#the-loop-vectorizer). This
flag will turn that behavior off.
## no-vectorize-slp
By default, `rustc` will attempt to vectorize loops using [superword-level
parallelism](https://llvm.org/docs/Vectorizers.html#the-slp-vectorizer). This
flag will turn that behavior off.
## soft-float
This option will make `rustc` generate code using "soft floats." By default,
a lot of hardware supports floating point instructions, and so the code generated
will take advantage of this. "soft floats" emulate floating point instructions
in software.
## prefer-dynamic
By default, `rustc` prefers to statically link dependencies. This option will
make it use dynamic linking instead.
## no-integrated-as
LLVM comes with an internal assembler; this option will let you use an
external assembler instead.
## no-redzone
This flag allows you to disable [the
red zone](https://en.wikipedia.org/wiki/Red_zone_\(computing\)). It accepts one
of the following values:
* To enable the red zone: `y`, `yes`, or `on`.
* To disable it: `n`, `no`, or `off`.
## relocation-model
This option lets you choose which relocation model to use.
To find the valid options for this flag, run `rustc --print relocation-models`.
## code-model=val
This option lets you choose which code model to use.
To find the valid options for this flag, run `rustc --print code-models`.
## metadata
This option allows you to control the metadata used for symbol mangling.
## extra-filename
This option allows you to put extra data in each output filename.
## codegen-units
This flag lets you control how many threads are used when doing
code generation.
Increasing parallelism may speed up compile times, but may also
produce slower code.
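As a rough illustration, forcing a single codegen unit trades longer compile
times for potentially better optimization (the file name is a placeholder):

```bash
$ rustc -C codegen-units=1 main.rs
```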
## remark
This flag lets you print remarks for the given optimization passes.
The list of passes should be separated by spaces.
`all` will remark on every pass.
## no-stack-check
This option is deprecated and does nothing.
## debuginfo
This flag lets you control debug information:
* `0`: no debug info at all
* `1`: line tables only
* `2`: full debug info
## opt-level
This flag lets you control the optimization level.
* `0`: no optimizations
* `1`: basic optimizations
* `2`: some optimizations
* `3`: all optimizations
* `s`: optimize for binary size
* `z`: optimize for binary size, but also turn off loop vectorization.
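For example (the file name is a placeholder):

```bash
$ rustc -C opt-level=3 main.rs   # favor runtime speed
$ rustc -C opt-level=s main.rs   # favor a small binary
```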
## debug-assertions
This flag lets you turn `cfg(debug_assertions)` on or off.
## inline-threshold
This option lets you set the threshold for inlining a function.
The default is 225.
## panic
This option lets you control what happens when the code panics.
* `abort`: terminate the process upon panic
* `unwind`: unwind the stack upon panic
## incremental
This flag allows you to enable incremental compilation.


@ -0,0 +1,116 @@
# Command-line arguments
Here's a list of command-line arguments to `rustc` and what they do.
## `-h`/`--help`: get help
This flag will print out help information for `rustc`.
## `--cfg`: configure the compilation environment
This flag can turn on or off various `#[cfg]` settings.
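For example, code guarded by `#[cfg(feature = "serde")]` could be enabled with
something like this (the feature name is only an illustration):

```bash
$ rustc --cfg 'feature="serde"' main.rs
```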
## `-L`: add a directory to the library search path
When looking for external crates, a directory passed to this flag will be searched.
## `-l`: link the generated crate to a native library
This flag allows you to specify linking to a specific native library when building
a crate.
## `--crate-type`: a list of types of crates for the compiler to emit
This instructs `rustc` on which crate type to build.
## `--crate-name`: specify the name of the crate being built
This informs `rustc` of the name of your crate.
## `--emit`: emit output other than a crate
Instead of producing a crate, this flag can print out things like the assembly or LLVM-IR.
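For instance (the file name is a placeholder):

```bash
$ rustc --emit=asm main.rs      # write out the generated assembly
$ rustc --emit=llvm-ir main.rs  # write out the LLVM IR
```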
## `--print`: print compiler information
This flag prints out various information about the compiler.
## `-g`: include debug information
A synonym for `-C debuginfo=2`.
## `-O`: optimize your code
A synonym for `-C opt-level=2`.
## `-o`: filename of the output
This flag controls the output filename.
## `--out-dir`: directory to write the output in
The outputted crate will be written to this directory.
## `--explain`: provide a detailed explanation of an error message
Each error of `rustc`'s comes with an error code; this will print
out a longer explanation of a given error.
## `--test`: build a test harness
When compiling this crate, `rustc` will ignore your `main` function
and instead produce a test harness.
## `--target`: select a target triple to build
This controls which [target](targets/index.html) to produce.
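For example, building for a specific triple might look like this (the triple and
file name are just placeholders):

```bash
$ rustc --target=x86_64-unknown-linux-gnu main.rs
```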
## `-W`: set lint warnings
This flag will set which lints should be set to the [warn level](lints/levels.html#warn).
## `-A`: set lint allowed
This flag will set which lints should be set to the [allow level](lints/levels.html#allow).
## `-D`: set lint denied
This flag will set which lints should be set to the [deny level](lints/levels.html#deny).
## `-F`: set lint forbidden
This flag will set which lints should be set to the [forbid level](lints/levels.html#forbid).
## `--cap-lints`: set the most restrictive lint level
This flag lets you 'cap' lints, for more, [see here](lints/levels.html#capping-lints).
## `-C`/`--codegen`: code generation options
This flag will allow you to set [codegen options](codegen-options/index.html).
## `-V`/`--version`: print a version
This flag will print out `rustc`'s version.
## `-v`/`--verbose`: use verbose output
This flag, when combined with other flags, makes them produce extra output.
## `--extern`: specify where an external library is located
This flag allows you to pass the name and location of an external crate that will
be linked into the crate you're building.
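For example, linking against a previously compiled `rlib` might look like this
(the crate name and path are hypothetical):

```bash
$ rustc --extern mylib=target/libmylib.rlib main.rs
```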
## `--sysroot`: Override the system root
The "sysroot" is where `rustc` looks for the crates that come with the Rust
distribution; this flag allows that to be overridden.
## `--error-format`: control how errors are produced
This flag lets you control the format of errors.
## `--color`: configure coloring of output
This flag lets you control color settings of the output.


@ -0,0 +1,6 @@
# Contributing to rustc
We'd love to have your help improving `rustc`! To that end, we've written [a
whole book](https://rust-lang-nursery.github.io/rustc-guide/) on its
internals, how it works, and how to get started working on it. To learn
more, you'll want to check that out.


@ -0,0 +1,29 @@
# Lint Groups
`rustc` has the concept of a "lint group", where you can toggle several warnings
through one name.
For example, the `nonstandard-style` lint sets `non-camel-case-types`,
`non-snake-case`, and `non-upper-case-globals` all at once. So these are
equivalent:
```bash
$ rustc -D nonstandard-style
$ rustc -D non-camel-case-types -D non-snake-case -D non-upper-case-globals
```
Here's a list of each lint group, and the lints that they are made up of:
| group | description | lints |
|---------------------|---------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
| nonstandard-style | Violation of standard naming conventions | non-camel-case-types, non-snake-case, non-upper-case-globals |
| warnings | all lints that would be issuing warnings | all lints that would be issuing warnings |
| edition-2018 | Lints that will be turned into errors in Rust 2018 | tyvar-behind-raw-pointer |
| rust-2018-idioms | Lints to nudge you toward idiomatic features of Rust 2018 | bare-trait-object, unreachable-pub |
| unused | These lints detect things being declared but not used | unused-imports, unused-variables, unused-assignments, dead-code, unused-mut, unreachable-code, unreachable-patterns, unused-must-use, unused-unsafe, path-statements, unused-attributes, unused-macros, unused-allocation, unused-doc-comment, unused-extern-crates, unused-features, unused-parens |
| future-incompatible | Lints that detect code that has future-compatibility problems | private-in-public, pub-use-of-private-extern-crate, patterns-in-fns-without-body, safe-extern-statics, invalid-type-param-default, legacy-directory-ownership, legacy-imports, legacy-constructor-visibility, missing-fragment-specifier, illegal-floating-point-literal-pattern, anonymous-parameters, parenthesized-params-in-types-and-modules, late-bound-lifetime-arguments, safe-packed-borrows, incoherent-fundamental-impls, tyvar-behind-raw-pointer, unstable-name-collision |
Additionally, there's a `bad-style` lint group that's a deprecated alias for `nonstandard-style`.
Finally, you can also see the table above by invoking `rustc -W help`. This will give you the exact values for the specific
compiler you have installed.


@ -0,0 +1,28 @@
# Lints
In software, a "lint" is a tool used to help improve your source code. The
Rust compiler contains a number of lints, and when it compiles your code, it will
also run the lints. These lints may produce a warning, an error, or nothing at all,
depending on how you've configured things.
Here's a small example:
```bash
$ cat main.rs
fn main() {
let x = 5;
}
$ rustc main.rs
warning: unused variable: `x`
--> main.rs:2:9
|
2 | let x = 5;
| ^
|
= note: #[warn(unused_variables)] on by default
= note: to avoid this warning, consider using `_x` instead
```
This is the `unused_variables` lint, and it tells you that you've introduced
a variable that you don't use in your code. That's not *wrong*, so it's not
an error, but it might be a bug, so you get a warning.


@ -0,0 +1,252 @@
# Lint levels
In `rustc`, lints are divided into four *levels*:
1. allow
2. warn
3. deny
4. forbid
Each lint has a default level (explained in the lint listing later in this
chapter), and the compiler has a default warning level. First, let's explain
what these levels mean, and then we'll talk about configuration.
## allow
These lints exist, but by default, do nothing. For example, consider this
source:
```rust
pub fn foo() {}
```
Compiling this file produces no warnings:
```bash
$ rustc lib.rs --crate-type=lib
$
```
But this code violates the `missing_docs` lint.
These lints exist mostly to be manually turned on via configuration, as we'll
talk about later in this section.
## warn
The 'warn' lint level will produce a warning if you violate the lint. For example,
this code runs afoul of the `unused_variables` lint:
```rust
pub fn foo() {
let x = 5;
}
```
This will produce this warning:
```console
$ rustc lib.rs --crate-type=lib
warning: unused variable: `x`
--> lib.rs:2:9
|
2 | let x = 5;
| ^
|
= note: #[warn(unused_variables)] on by default
= note: to avoid this warning, consider using `_x` instead
```
## deny
A 'deny' lint produces an error if you violate it. For example, this code
runs into the `exceeding_bitshifts` lint.
```rust,ignore
fn main() {
100u8 << 10;
}
```
```bash
$ rustc main.rs
error: bitshift exceeds the type's number of bits
--> main.rs:2:13
|
2 | 100u8 << 10;
| ^^^^^^^^^^^
|
= note: #[deny(exceeding_bitshifts)] on by default
```
What's the difference between an error from a lint and a regular old error?
Lints are configurable via levels: just as 'allow' lints can be turned up to
warn or deny, lints that are 'deny' by default can be turned down to 'allow'.
Similarly, you may wish to set up a lint that is 'warn' by default to produce an
error instead; the lint levels give you that control.
## forbid
'forbid' is a special lint level that's stronger than 'deny'. It's the same
as 'deny' in that a lint at this level will produce an error, but unlike the
'deny' level, the 'forbid' level can not be overridden to be anything lower
than an error.
## Configuring warning levels
Remember our `missing_docs` example from the 'allow' lint level?
```bash
$ cat lib.rs
pub fn foo() {}
$ rustc lib.rs --crate-type=lib
$
```
We can configure this lint to operate at a higher level, both with
compiler flags, as well as with an attribute in the source code.
You can also "cap" lints so that the compiler can choose to ignore
certain lint levels. We'll talk about that last.
### Via compiler flag
The `-A`, `-W`, `-D`, and `-F` flags let you turn one or more lints
into allowed, warning, deny, or forbid levels, like this:
```bash
$ rustc lib.rs --crate-type=lib -W missing-docs
warning: missing documentation for crate
--> lib.rs:1:1
|
1 | pub fn foo() {}
| ^^^^^^^^^^^^
|
= note: requested on the command line with `-W missing-docs`
warning: missing documentation for a function
--> lib.rs:1:1
|
1 | pub fn foo() {}
| ^^^^^^^^^^^^
$ rustc lib.rs --crate-type=lib -D missing-docs
error: missing documentation for crate
--> lib.rs:1:1
|
1 | pub fn foo() {}
| ^^^^^^^^^^^^
|
= note: requested on the command line with `-D missing-docs`
error: missing documentation for a function
--> lib.rs:1:1
|
1 | pub fn foo() {}
| ^^^^^^^^^^^^
error: aborting due to 2 previous errors
```
You can also pass each flag more than once for changing multiple lints:
```bash
rustc lib.rs --crate-type=lib -D missing-docs -D unused-variables
```
And of course, you can mix these four flags together:
```bash
rustc lib.rs --crate-type=lib -D missing-docs -A unused-variables
```
### Via an attribute
You can also modify the lint level with a crate-wide attribute:
```bash
$ cat lib.rs
#![warn(missing_docs)]
pub fn foo() {}
$ rustc lib.rs --crate-type=lib
warning: missing documentation for crate
--> lib.rs:1:1
|
1 | / #![warn(missing_docs)]
2 | |
3 | | pub fn foo() {}
| |_______________^
|
note: lint level defined here
--> lib.rs:1:9
|
1 | #![warn(missing_docs)]
| ^^^^^^^^^^^^
warning: missing documentation for a function
--> lib.rs:3:1
|
3 | pub fn foo() {}
| ^^^^^^^^^^^^
```
All four levels, `warn`, `allow`, `deny`, and `forbid`, work this way.
You can also pass in multiple lints per attribute:
```rust
#![warn(missing_docs, unused_variables)]
pub fn foo() {}
```
And use multiple attributes together:
```rust
#![warn(missing_docs)]
#![deny(unused_variables)]
pub fn foo() {}
```
### Capping lints
`rustc` supports a flag, `--cap-lints LEVEL` that sets the "lint cap level."
This is the maximum level for all lints. So for example, if we take our
code sample from the "deny" lint level above:
```rust,ignore
fn main() {
100u8 << 10;
}
```
And we compile it, capping lints to warn:
```bash
$ rustc lib.rs --cap-lints warn
warning: bitshift exceeds the type's number of bits
--> lib.rs:2:5
|
2 | 100u8 << 10;
| ^^^^^^^^^^^
|
= note: #[warn(exceeding_bitshifts)] on by default
warning: this expression will panic at run-time
--> lib.rs:2:5
|
2 | 100u8 << 10;
| ^^^^^^^^^^^ attempt to shift left with overflow
```
It now only warns, rather than errors. We can go further and allow all lints:
```bash
$ rustc lib.rs --cap-lints allow
$
```
This feature is used heavily by Cargo; it will pass `--cap-lints allow` when
compiling your dependencies, so that if they have any warnings, they do not
pollute the output of your build.


@ -0,0 +1,453 @@
# Allowed-by-default lints
These lints are all set to the 'allow' level by default. As such, they won't show up
unless you set them to a higher lint level with a flag or attribute.
## anonymous-parameters
This lint detects anonymous parameters. Some example code that triggers this lint:
```rust
trait Foo {
fn foo(usize);
}
```
When set to 'deny', this will produce:
```text
error: use of deprecated anonymous parameter
--> src/lib.rs:5:11
|
5 | fn foo(usize);
| ^
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #41686 <https://github.com/rust-lang/rust/issues/41686>
```
This syntax is mostly a historical accident, and can be worked around quite
easily:
```rust
trait Foo {
fn foo(_: usize);
}
```
## bare-trait-object
This lint suggests using `dyn Trait` for trait objects. Some example code
that triggers this lint:
```rust
#![feature(dyn_trait)]
trait Trait { }
fn takes_trait_object(_: Box<Trait>) {
}
```
When set to 'deny', this will produce:
```text
error: trait objects without an explicit `dyn` are deprecated
--> src/lib.rs:7:30
|
7 | fn takes_trait_object(_: Box<Trait>) {
| ^^^^^ help: use `dyn`: `dyn Trait`
|
```
To fix it, do as the help message suggests:
```rust
#![feature(dyn_trait)]
#![deny(bare_trait_object)]
trait Trait { }
fn takes_trait_object(_: Box<dyn Trait>) {
}
```
## box-pointers
This lint detects use of the `Box` type. Some example code that triggers this lint:
```rust
struct Foo {
x: Box<isize>,
}
```
When set to 'deny', this will produce:
```text
error: type uses owned (Box type) pointers: std::boxed::Box<isize>
--> src/lib.rs:6:5
|
6 | x: Box<isize> //~ ERROR type uses owned
| ^^^^^^^^^^^^^
|
```
This lint is mostly historical, and not particularly useful. `Box<T>` used to
be built into the language, and was the only way to do heap allocation. Today's
Rust can call into other allocators, etc.
## elided-lifetime-in-path
This lint detects the use of hidden lifetime parameters. Some example code
that triggers this lint:
```rust
struct Foo<'a> {
x: &'a u32
}
fn foo(x: &Foo) {
}
```
When set to 'deny', this will produce:
```text
error: hidden lifetime parameters are deprecated, try `Foo<'_>`
--> src/lib.rs:5:12
|
5 | fn foo(x: &Foo) {
| ^^^
|
```
Lifetime elision allows this lifetime to be hidden, but hiding it in paths is being deprecated.
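To fix it, write the lifetime out, for example with the anonymous `'_` lifetime
that the error message suggests (a sketch; it assumes a compiler that accepts
`'_` in paths):
```rust
struct Foo<'a> {
    x: &'a u32
}

// `Foo<'_>` makes the previously hidden lifetime visible in the path.
fn foo(x: &Foo<'_>) {
}
```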
## missing-copy-implementations
This lint detects potentially-forgotten implementations of `Copy`. Some
example code that triggers this lint:
```rust
pub struct Foo {
pub field: i32
}
```
When set to 'deny', this will produce:
```text
error: type could implement `Copy`; consider adding `impl Copy`
--> src/main.rs:3:1
|
3 | / pub struct Foo { //~ ERROR type could implement `Copy`; consider adding `impl Copy`
4 | | pub field: i32
5 | | }
| |_^
|
```
You can fix the lint by deriving `Copy`.
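For example, deriving `Copy` (and `Clone`, which `Copy` requires) silences the lint:
```rust
#[derive(Copy, Clone)]
pub struct Foo {
    pub field: i32
}
```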
This lint is set to 'allow' because this code isn't bad; it's common to write
newtypes like this specifically so that a `Copy` type is no longer `Copy`.
## missing-debug-implementations
This lint detects missing implementations of `fmt::Debug`. Some example code
that triggers this lint:
```rust
pub struct Foo;
```
When set to 'deny', this will produce:
```text
error: type does not implement `fmt::Debug`; consider adding #[derive(Debug)] or a manual implementation
--> src/main.rs:3:1
|
3 | pub struct Foo;
| ^^^^^^^^^^^^^^^
|
```
You can fix the lint by deriving `Debug`.
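For example, deriving `Debug` silences the lint:
```rust
#[derive(Debug)]
pub struct Foo;
```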
## missing-docs
This lint detects missing documentation for public items. Some example code
that triggers this lint:
```rust
pub fn foo() {}
```
When set to 'deny', this will produce:
```text
error: missing documentation for crate
--> src/main.rs:1:1
|
1 | / #![deny(missing_docs)]
2 | |
3 | | pub fn foo() {}
4 | |
5 | | fn main() {}
| |____________^
|
error: missing documentation for a function
--> src/main.rs:3:1
|
3 | pub fn foo() {}
| ^^^^^^^^^^^^
```
To fix the lint, add documentation to all items.
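For example, documenting the function (and, for the crate itself, adding `//!`
crate-level docs) silences the lint; a minimal sketch:
```rust
/// Does nothing; exists only to show a documented public item.
pub fn foo() {}
```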
## single-use-lifetime
This lint detects lifetimes that are only used once. Some example code that
triggers this lint:
```rust
struct Foo<'x> {
x: &'x u32
}
```
When set to 'deny', this will produce:
```text
error: lifetime name `'x` only used once
--> src/main.rs:3:12
|
3 | struct Foo<'x> {
| ^^
|
```
## trivial-casts
This lint detects trivial casts which could be removed. Some example code
that triggers this lint:
```rust
let x: &u32 = &42;
let _ = x as *const u32;
```
When set to 'deny', this will produce:
```text
error: trivial cast: `&u32` as `*const u32`. Cast can be replaced by coercion, this might require type ascription or a temporary variable
--> src/main.rs:5:13
|
5 | let _ = x as *const u32;
| ^^^^^^^^^^^^^^^
|
note: lint level defined here
--> src/main.rs:1:9
|
1 | #![deny(trivial_casts)]
| ^^^^^^^^^^^^^
```
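As the message suggests, the cast can be replaced by a coercion, for example by
annotating the binding instead (a sketch):
```rust
let x: &u32 = &42;
let _: *const u32 = x;
```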
## trivial-numeric-casts
This lint detects trivial casts of numeric types which could be removed. Some
example code that triggers this lint:
```rust
let x = 42i32 as i32;
```
When set to 'deny', this will produce:
```text
error: trivial numeric cast: `i32` as `i32`. Cast can be replaced by coercion, this might require type ascription or a temporary variable
--> src/main.rs:4:13
|
4 | let x = 42i32 as i32;
| ^^^^^^^^^^^^
|
```
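To fix it, drop the redundant cast (a sketch):
```rust
let x = 42i32;
```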
## unreachable-pub
This lint triggers for `pub` items not reachable from the crate root. Some
example code that triggers this lint:
```rust
mod foo {
pub mod bar {
}
}
```
When set to 'deny', this will produce:
```text
error: unreachable `pub` item
--> src/main.rs:4:5
|
4 | pub mod bar {
| ---^^^^^^^^
| |
| help: consider restricting its visibility: `pub(crate)`
|
```
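Following the help message, restricting the visibility fixes it (a sketch):
```rust
mod foo {
    pub(crate) mod bar {
    }
}
```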
## unsafe-code
This lint catches usage of `unsafe` code. Some example code that triggers this lint:
```rust
fn main() {
unsafe {
}
}
```
When set to 'deny', this will produce:
```text
error: usage of an `unsafe` block
--> src/main.rs:4:5
|
4 | / unsafe {
5 | |
6 | | }
| |_____^
|
```
## unstable-features
This lint is deprecated and no longer used.
## unused-extern-crates
This lint guards against `extern crate` items that are never used. Some
example code that triggers this lint:
```rust,ignore
extern crate semver;
```
When set to 'deny', this will produce:
```text
error: unused extern crate
--> src/main.rs:3:1
|
3 | extern crate semver;
| ^^^^^^^^^^^^^^^^^^^^
|
```
## unused-import-braces
This lint catches unnecessary braces around an imported item. Some example
code that triggers this lint:
```rust
use test::{A};
pub mod test {
pub struct A;
}
# fn main() {}
```
When set to 'deny', this will produce:
```text
error: braces around A is unnecessary
--> src/main.rs:3:1
|
3 | use test::{A};
| ^^^^^^^^^^^^^^
|
```
To fix it, `use test::A;`
## unused-qualifications
This lint detects unnecessarily qualified names. Some example code that triggers this lint:
```rust
mod foo {
pub fn bar() {}
}
fn main() {
use foo::bar;
foo::bar();
}
```
When set to 'deny', this will produce:
```text
error: unnecessary qualification
--> src/main.rs:9:5
|
9 | foo::bar();
| ^^^^^^^^
|
```
You can call `bar()` directly, without the `foo::`.
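A fixed version might look like this (a sketch):
```rust
mod foo {
    pub fn bar() {}
}

fn main() {
    use foo::bar;
    bar();
}
```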
## unused-results
This lint checks for the unused result of an expression in a statement. Some
example code that triggers this lint:
```rust,no_run
fn foo<T>() -> T { panic!() }
fn main() {
foo::<usize>();
}
```
When set to 'deny', this will produce:
```text
error: unused result
--> src/main.rs:6:5
|
6 | foo::<usize>();
| ^^^^^^^^^^^^^^^
|
```
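One way to silence it is to explicitly discard the result (a sketch):
```rust,no_run
fn foo<T>() -> T { panic!() }

fn main() {
    let _ = foo::<usize>();
}
```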
## variant-size-differences
This lint detects enums with widely varying variant sizes. Some example code that triggers this lint:
```rust
enum En {
V0(u8),
VBig([u8; 1024]),
}
```
When set to 'deny', this will produce:
```text
error: enum variant is more than three times larger (1024 bytes) than the next largest
--> src/main.rs:5:5
|
5 | VBig([u8; 1024]), //~ ERROR variant is more than three times larger
| ^^^^^^^^^^^^^^^^
|
```
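One common way to reduce the difference is to box the large variant, so the
enum itself only stores a pointer to it (a sketch; other layouts are possible):
```rust
enum En {
    V0(u8),
    VBig(Box<[u8; 1024]>),
}
```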

View File

@ -0,0 +1,241 @@
# Deny-by-default lints
These lints are all set to the 'deny' level by default.
## exceeding-bitshifts
This lint detects that a shift exceeds the type's number of bits. Some
example code that triggers this lint:
```rust,ignore
1_i32 << 32;
```
This will produce:
```text
error: bitshift exceeds the type's number of bits
--> src/main.rs:2:5
|
2 | 1_i32 << 32;
| ^^^^^^^^^^^
|
```
## invalid-type-param-default
This lint detects a type parameter default erroneously allowed in an invalid location. Some
example code that triggers this lint:
```rust,ignore
fn foo<T=i32>(t: T) {}
```
This will produce:
```text
error: defaults for type parameters are only allowed in `struct`, `enum`, `type`, or `trait` definitions.
--> src/main.rs:4:8
|
4 | fn foo<T=i32>(t: T) {}
| ^
|
= note: #[deny(invalid_type_param_default)] on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #36887 <https://github.com/rust-lang/rust/issues/36887>
```
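Defaults are accepted in `struct`, `enum`, `type`, and `trait` definitions, so
one fix is to move the default onto such an item (a sketch):
```rust
struct Foo<T = i32> {
    t: T,
}
```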
## legacy-constructor-visibility
[RFC 1506](https://github.com/rust-lang/rfcs/blob/master/text/1506-adt-kinds.md) modified some
visibility rules, and changed the visibility of struct constructors. Some
example code that triggers this lint:
```rust,ignore
mod m {
pub struct S(u8);
fn f() {
    // this is trying to use S from the 'use' line, but because the `u8` is
// not pub, it is private
::S;
}
}
use m::S;
```
This will produce:
```text
error: private struct constructors are not usable through re-exports in outer modules
--> src/main.rs:5:9
|
5 | ::S;
| ^^^
|
= note: #[deny(legacy_constructor_visibility)] on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #39207 <https://github.com/rust-lang/rust/issues/39207>
```
## legacy-directory-ownership
The legacy_directory_ownership warning is issued when
* There is a non-inline module with a #[path] attribute (e.g. #[path = "foo.rs"] mod bar;),
* The module's file ("foo.rs" in the above example) is not named "mod.rs", and
* The module's file contains a non-inline child module without a #[path] attribute.
The warning can be fixed by renaming the parent module to "mod.rs" and moving
it into its own directory if appropriate.
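A sketch of a layout that can trigger the warning (file names are illustrative):
```rust,ignore
// lib.rs
#[path = "foo.rs"]
mod bar;

// foo.rs -- not named `mod.rs`
mod baz; // non-inline child module without a `#[path]` attribute
```
Renaming `foo.rs` to `bar/mod.rs` and moving `baz.rs` into that directory
resolves the warning.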
## legacy-imports
This lint detects names that resolve to ambiguous glob imports. Some example
code that triggers this lint:
```rust,ignore
pub struct Foo;
mod bar {
struct Foo;
mod baz {
use *;
use bar::*;
fn f(_: Foo) {}
}
}
```
This will produce:
```text
error: `Foo` is ambiguous
--> src/main.rs:9:17
|
7 | use *;
| - `Foo` could refer to the name imported here
8 | use bar::*;
| ------ `Foo` could also refer to the name imported here
9 | fn f(_: Foo) {}
| ^^^
|
= note: #[deny(legacy_imports)] on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #38260 <https://github.com/rust-lang/rust/issues/38260>
```
## missing-fragment-specifier
The missing_fragment_specifier warning is issued when an unused pattern in a
`macro_rules!` macro definition has a meta-variable (e.g. `$e`) that is not
followed by a fragment specifier (e.g. `:expr`).
This warning can always be fixed by removing the unused pattern in the
`macro_rules!` macro definition.
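A sketch of a definition that triggers the warning; adding a specifier such as
`:expr` to `$e`, or removing the unused arm, fixes it:
```rust,ignore
macro_rules! give {
    () => { 0 };
    ($e) => { $e }; // this arm is never used and `$e` has no fragment specifier
}

fn main() {
    let _ = give!();
}
```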
## mutable-transmutes
This lint catches transmuting from `&T` to `&mut T` because it is undefined
behavior. Some example code that triggers this lint:
```rust,ignore
unsafe {
let y = std::mem::transmute::<&i32, &mut i32>(&5);
}
```
This will produce:
```text
error: mutating transmuted &mut T from &T may cause undefined behavior, consider instead using an UnsafeCell
--> src/main.rs:3:17
|
3 | let y = std::mem::transmute::<&i32, &mut i32>(&5);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
```
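As the diagnostic suggests, interior mutability is the sound alternative; the
safe `Cell` and `RefCell` wrappers are built on `UnsafeCell`. A minimal sketch:
```rust
use std::cell::Cell;

fn main() {
    let x = Cell::new(5);
    x.set(6); // mutate through a shared handle without transmuting
    assert_eq!(x.get(), 6);
}
```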
## no-mangle-const-items
This lint detects any `const` items with the `#[no_mangle]` attribute.
Constants do not have their symbols exported, and therefore, this probably
means you meant to use a `static`, not a `const`. Some example code that
triggers this lint:
```rust,ignore
#[no_mangle]
const FOO: i32 = 5;
```
This will produce:
```text
error: const items should never be #[no_mangle]
--> src/main.rs:3:1
|
3 | const FOO: i32 = 5;
| -----^^^^^^^^^^^^^^
| |
| help: try a static value: `pub static`
|
```
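Following the help message, using a `static` keeps the exported symbol (a sketch):
```rust
#[no_mangle]
pub static FOO: i32 = 5;
```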
## parenthesized-params-in-types-and-modules
This lint detects incorrectly parenthesized parameters in types and module paths. Some example code that triggers this
lint:
```rust,ignore
let x = 5 as usize();
```
This will produce:
```text
error: parenthesized parameters may only be used with a trait
--> src/main.rs:2:21
|
2 | let x = 5 as usize();
| ^^
|
= note: #[deny(parenthesized_params_in_types_and_modules)] on by default
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #42238 <https://github.com/rust-lang/rust/issues/42238>
```
To fix it, remove the `()`s.
## pub-use-of-private-extern-crate
This lint detects a specific situation of re-exporting a private `extern crate`.
## safe-extern-statics
In older versions of Rust, there was a soundness issue where `extern static`s were allowed
to be accessed in safe code. This lint now catches and denies this kind of code.
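A sketch of the kind of code this lint rejects; wrapping the access in an
`unsafe` block fixes it:
```rust,ignore
extern {
    static FOO: i32;
}

fn main() {
    let _x = FOO;            // denied: accessing an `extern` static in safe code
    let _y = unsafe { FOO }; // OK
}
```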
## unknown-crate-types
This lint detects an unknown crate type found in a `#[crate_type]` directive. Some
example code that triggers this lint:
```rust,ignore
#![crate_type="lol"]
```
This will produce:
```text
error: invalid `crate_type` value
--> src/lib.rs:1:1
|
1 | #![crate_type="lol"]
| ^^^^^^^^^^^^^^^^^^^^
|
```
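To fix it, use one of the valid crate types, such as `lib` or `bin` (a sketch):
```rust,ignore
#![crate_type="lib"]
```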

View File

@ -0,0 +1,5 @@
# Lint listing
This section lists out all of the lints, grouped by their default lint levels.
You can also see this list by running `rustc -W help`.

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,10 @@
# Built-in Targets
`rustc` ships with the ability to compile to many targets automatically; we
call these "built-in" targets, and they generally correspond to targets that
the team supports directly.
To see the list of built-in targets, you can run `rustc --print target-list`,
or look at [the API
docs](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_back/target/#modules).
Each module there defines a builder for a particular target.

View File

@ -0,0 +1,17 @@
# Custom Targets
If you'd like to build for a target that is not yet supported by `rustc`, you can use a
"custom target specification" to define a target. These target specification files
are JSON. To see the JSON for the host target, you can run:
```bash
$ rustc +nightly -Z unstable-options --print target-spec-json
```
To see it for a different target, add the `--target` flag:
```bash
$ rustc +nightly -Z unstable-options --target=wasm32-unknown-unknown --print target-spec-json
```
To use a custom target, see [`xargo`](https://github.com/japaric/xargo).

View File

@ -0,0 +1,13 @@
# Targets
`rustc` is a cross-compiler by default. This means that a single `rustc` can build for any
architecture. The list of *targets* is the set of possible architectures that you can build for.
To see all the options that you can set with a target, see the docs
[here](https://doc.rust-lang.org/nightly/nightly-rustc/rustc_back/target/struct.Target.html).
To compile to a particular target, use the `--target` flag:
```bash
$ rustc src/main.rs --target=wasm32-unknown-unknown
```

View File

@ -0,0 +1,68 @@
# What is rustc?
Welcome to "The rustc book"! `rustc` is the compiler for the Rust programming
language, provided by the project itself. Compilers take your source code and
produce binary code, either as a library or executable.
Most Rust programmers don't invoke `rustc` directly, but instead do it through
[Cargo](../cargo/index.html). It's all in service of `rustc` though! If you
want to see how Cargo calls `rustc`, you can
```bash
$ cargo build --verbose
```
And it will print out each `rustc` invocation. This book can help you
understand what each of these options does. Additionally, while most
Rustaceans use Cargo, not all do: sometimes they integrate `rustc` into other
build systems. This book should provide a guide to all of the options you'd
need to do so.
## Basic usage
Let's say you've got a little hello world program in a file `hello.rs`:
```rust
fn main() {
println!("Hello, world!");
}
```
To turn this source code into an executable, you can use `rustc`:
```bash
$ rustc hello.rs
$ ./hello # on a *NIX
$ .\hello.exe # on Windows
```
Note that we only ever pass `rustc` the *crate root*, not every file we wish
to compile. For example, if we had a `main.rs` that looked like this:
```rust,ignore
mod foo;
fn main() {
foo::hello();
}
```
And a `foo.rs` that had this:
```rust,ignore
fn hello() {
println!("Hello, world!");
}
```
To compile this, we'd run this command:
```bash
$ rustc main.rs
```
No need to tell `rustc` about `foo.rs`; the `mod` statements give it
everything that it needs. This is different than how you would use a C
compiler, where you invoke the compiler on each file, and then link
everything together. In other words, the *crate* is a translation unit, not a
particular module.

View File

@ -1,30 +0,0 @@
# `fn_must_use`
The tracking issue for this feature is [#43302].
[#43302]: https://github.com/rust-lang/rust/issues/43302
------------------------
The `fn_must_use` feature allows functions and methods to be annotated with
`#[must_use]`, indicating that the `unused_must_use` lint should require their
return values to be used (similarly to how types annotated with `must_use`,
most notably `Result`, are linted if not used).
## Examples
```rust
#![feature(fn_must_use)]
#[must_use]
fn double(x: i32) -> i32 {
2 * x
}
fn main() {
double(4); // warning: unused return value of `double` which must be used
let _ = double(4); // (no warning)
}
```

View File

@ -0,0 +1,26 @@
# `tool_attributes`
The tracking issue for this feature is: [#44690]
[#44690]: https://github.com/rust-lang/rust/issues/44690
------------------------
Tool attributes let you use scoped attributes to control the behavior
of certain tools.
Currently, the tool names that can appear in scoped attributes are restricted to
`clippy` and `rustfmt`.
## An example
```rust
#![feature(tool_attributes)]
#[rustfmt::skip]
fn foo() { println!("hello, world"); }
fn main() {
foo();
}
```

View File

@ -16,7 +16,7 @@
issue = "32838")]
use core::intrinsics::{min_align_of_val, size_of_val};
use core::ptr::NonNull;
use core::ptr::{NonNull, Unique};
use core::usize;
#[doc(inline)]
@ -152,9 +152,17 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
}
}
#[cfg_attr(not(test), lang = "box_free")]
#[cfg(stage0)]
#[lang = "box_free"]
#[inline]
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
unsafe fn old_box_free<T: ?Sized>(ptr: *mut T) {
box_free(Unique::new_unchecked(ptr))
}
#[cfg_attr(not(any(test, stage0)), lang = "box_free")]
#[inline]
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
let ptr = ptr.as_ptr();
let size = size_of_val(&*ptr);
let align = min_align_of_val(&*ptr);
// We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.

View File

@ -566,7 +566,8 @@ impl<T: ?Sized> Arc<T> {
fn from_box(v: Box<T>) -> Arc<T> {
unsafe {
let bptr = Box::into_raw(v);
let box_unique = Box::into_unique(v);
let bptr = box_unique.as_ptr();
let value_size = size_of_val(&*bptr);
let ptr = Self::allocate_for_ptr(bptr);
@ -578,7 +579,7 @@ impl<T: ?Sized> Arc<T> {
value_size);
// Free the allocation without dropping its contents
box_free(bptr);
box_free(box_unique);
Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
}

View File

@ -184,6 +184,7 @@ impl<T: ?Sized> Box<T> {
#[unstable(feature = "ptr_internals", issue = "0", reason = "use into_raw_non_null instead")]
#[inline]
#[doc(hidden)]
pub fn into_unique(b: Box<T>) -> Unique<T> {
let unique = b.0;
mem::forget(b);

View File

@ -96,7 +96,7 @@
#![feature(dropck_eyepatch)]
#![feature(exact_size_is_empty)]
#![feature(fmt_internals)]
#![feature(fn_must_use)]
#![cfg_attr(stage0, feature(fn_must_use))]
#![feature(from_ref)]
#![feature(fundamental)]
#![feature(lang_items)]
@ -122,8 +122,9 @@
#![feature(on_unimplemented)]
#![feature(exact_chunks)]
#![feature(pointer_methods)]
#![feature(inclusive_range_fields)]
#![feature(inclusive_range_methods)]
#![cfg_attr(stage0, feature(generic_param_attrs))]
#![feature(rustc_const_unstable)]
#![cfg_attr(not(test), feature(fn_traits, i128))]
#![cfg_attr(test, feature(test))]

View File

@ -56,14 +56,16 @@ pub struct RawVec<T, A: Alloc = Global> {
impl<T, A: Alloc> RawVec<T, A> {
/// Like `new` but parameterized over the choice of allocator for
/// the returned RawVec.
pub fn new_in(a: A) -> Self {
pub const fn new_in(a: A) -> Self {
// !0 is usize::MAX. This branch should be stripped at compile time.
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
// FIXME(mark-i-m): use this line when `if`s are allowed in `const`
//let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
// Unique::empty() doubles as "unallocated" and "zero-sized allocation"
RawVec {
ptr: Unique::empty(),
cap,
// FIXME(mark-i-m): use `cap` when ifs are allowed in const
cap: [0, !0][(mem::size_of::<T>() == 0) as usize],
a,
}
}
@ -120,7 +122,7 @@ impl<T> RawVec<T, Global> {
/// RawVec with capacity 0. If T has 0 size, then it makes a
/// RawVec with capacity `usize::MAX`. Useful for implementing
/// delayed allocation.
pub fn new() -> Self {
pub const fn new() -> Self {
Self::new_in(Global)
}

View File

@ -681,7 +681,8 @@ impl<T: ?Sized> Rc<T> {
fn from_box(v: Box<T>) -> Rc<T> {
unsafe {
let bptr = Box::into_raw(v);
let box_unique = Box::into_unique(v);
let bptr = box_unique.as_ptr();
let value_size = size_of_val(&*bptr);
let ptr = Self::allocate_for_ptr(bptr);
@ -693,7 +694,7 @@ impl<T: ?Sized> Rc<T> {
value_size);
// Free the allocation without dropping its contents
box_free(bptr);
box_free(box_unique);
Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
}

View File

@ -25,7 +25,7 @@
#![feature(try_reserve)]
#![feature(unboxed_closures)]
#![feature(exact_chunks)]
#![feature(inclusive_range_fields)]
#![feature(inclusive_range_methods)]
extern crate alloc_system;
extern crate core;

View File

@ -1282,6 +1282,7 @@ fn test_box_slice_clone() {
}
#[test]
#[allow(unused_must_use)] // here, we care about the side effects of `.clone()`
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_box_slice_clone_panics() {
use std::sync::Arc;

View File

@ -322,7 +322,8 @@ impl<T> Vec<T> {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> Vec<T> {
#[rustc_const_unstable(feature = "const_vec_new")]
pub const fn new() -> Vec<T> {
Vec {
buf: RawVec::new(),
len: 0,

View File

@ -106,6 +106,8 @@ use self::Ordering::*;
/// ```
#[lang = "eq"]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(alias = "==")]
#[doc(alias = "!=")]
#[rustc_on_unimplemented = "can't compare `{Self}` with `{Rhs}`"]
pub trait PartialEq<Rhs: ?Sized = Self> {
/// This method tests for `self` and `other` values to be equal, and is used
@ -160,6 +162,8 @@ pub trait PartialEq<Rhs: ?Sized = Self> {
/// }
/// impl Eq for Book {}
/// ```
#[doc(alias = "==")]
#[doc(alias = "!=")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Eq: PartialEq<Self> {
// this method is used solely by #[deriving] to assert
@ -428,6 +432,10 @@ impl<T: Ord> Ord for Reverse<T> {
/// }
/// ```
#[lang = "ord"]
#[doc(alias = "<")]
#[doc(alias = ">")]
#[doc(alias = "<=")]
#[doc(alias = ">=")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Ord: Eq + PartialOrd<Self> {
/// This method returns an `Ordering` between `self` and `other`.
@ -599,6 +607,10 @@ impl PartialOrd for Ordering {
/// ```
#[lang = "partial_ord"]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(alias = ">")]
#[doc(alias = "<")]
#[doc(alias = "<=")]
#[doc(alias = ">=")]
#[rustc_on_unimplemented = "can't compare `{Self}` with `{Rhs}`"]
pub trait PartialOrd<Rhs: ?Sized = Self>: PartialEq<Rhs> {
/// This method returns an ordering between `self` and `other` values if one exists.

View File

@ -542,11 +542,12 @@ impl<'a> Display for Arguments<'a> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(
on(crate_local, label="`{Self}` cannot be formatted using `:?`; \
add `#[derive(Debug)]` or manually implement `{Debug}`"),
on(crate_local, label="`{Self}` cannot be formatted using `{{:?}}`",
note="add `#[derive(Debug)]` or manually implement `{Debug}`"),
message="`{Self}` doesn't implement `{Debug}`",
label="`{Self}` cannot be formatted using `:?` because it doesn't implement `{Debug}`",
label="`{Self}` cannot be formatted using `{{:?}}` because it doesn't implement `{Debug}`",
)]
#[doc(alias = "{:?}")]
#[lang = "debug_trait"]
pub trait Debug {
/// Formats the value using the given formatter.
@ -609,9 +610,11 @@ pub trait Debug {
/// ```
#[rustc_on_unimplemented(
message="`{Self}` doesn't implement `{Display}`",
label="`{Self}` cannot be formatted with the default formatter; \
try using `:?` instead if you are using a format string",
label="`{Self}` cannot be formatted with the default formatter",
note="in format strings you may be able to use `{{:?}}` \
(or {{:#?}} for pretty-print) instead",
)]
#[doc(alias = "{}")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Display {
/// Formats the value using the given formatter.

View File

@ -1094,6 +1094,8 @@ pub trait Iterator {
/// `flatten()` a three-dimensional array the result will be
/// two-dimensional and not one-dimensional. To get a one-dimensional
/// structure, you have to `flatten()` again.
///
/// [`flat_map()`]: #method.flat_map
#[inline]
#[unstable(feature = "iterator_flatten", issue = "48213")]
fn flatten(self) -> Flatten<Self>

View File

@ -50,6 +50,15 @@
// Since libcore defines many fundamental lang items, all tests live in a
// separate crate, libcoretest, to avoid bizarre issues.
//
// Here we explicitly #[cfg]-out this whole crate when testing. If we don't do
// this, both the generated test artifact and the linked libtest (which
// transitively includes libcore) will both define the same set of lang items,
// and this will cause the E0152 "duplicate lang item found" error. See
// discussion in #50466 for details.
//
// This cfg won't affect doc tests.
#![cfg(not(test))]
#![stable(feature = "core", since = "1.6.0")]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
@ -76,7 +85,6 @@
#![feature(doc_cfg)]
#![feature(doc_spotlight)]
#![feature(extern_types)]
#![feature(fn_must_use)]
#![feature(fundamental)]
#![feature(intrinsics)]
#![feature(iterator_flatten)]
@ -103,6 +111,7 @@
#![feature(untagged_unions)]
#![feature(unwind_attributes)]
#![feature(doc_alias)]
#![feature(inclusive_range_methods)]
#![cfg_attr(not(stage0), feature(mmx_target_feature))]
#![cfg_attr(not(stage0), feature(tbm_target_feature))]
@ -114,6 +123,7 @@
#![cfg_attr(stage0, feature(target_feature))]
#![cfg_attr(stage0, feature(cfg_target_feature))]
#![cfg_attr(stage0, feature(fn_must_use))]
#[prelude_import]
#[allow(unused)]

View File

@ -606,8 +606,8 @@ mod builtin {
#[macro_export]
#[cfg(dox)]
macro_rules! concat_idents {
($($e:ident),*) => ({ /* compiler built-in */ });
($($e:ident,)*) => ({ /* compiler built-in */ });
($($e:ident),+) => ({ /* compiler built-in */ });
($($e:ident,)+) => ({ /* compiler built-in */ });
}
/// Concatenates literals into a static string slice.

View File

@ -602,6 +602,8 @@ unsafe impl<'a, T: ?Sized> Freeze for &'a mut T {}
/// `Pin` pointer.
///
/// This trait is automatically implemented for almost every type.
///
/// [`Pin`]: ../mem/struct.Pin.html
#[unstable(feature = "pin", issue = "49150")]
pub unsafe auto trait Unpin {}

View File

@ -94,6 +94,7 @@ pub trait Add<RHS=Self> {
type Output;
/// Performs the `+` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn add(self, rhs: RHS) -> Self::Output;
}
@ -191,6 +192,7 @@ pub trait Sub<RHS=Self> {
type Output;
/// Performs the `-` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn sub(self, rhs: RHS) -> Self::Output;
}
@ -310,6 +312,7 @@ pub trait Mul<RHS=Self> {
type Output;
/// Performs the `*` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn mul(self, rhs: RHS) -> Self::Output;
}
@ -433,6 +436,7 @@ pub trait Div<RHS=Self> {
type Output;
/// Performs the `/` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn div(self, rhs: RHS) -> Self::Output;
}
@ -517,6 +521,7 @@ pub trait Rem<RHS=Self> {
type Output = Self;
/// Performs the `%` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn rem(self, rhs: RHS) -> Self::Output;
}
@ -601,6 +606,7 @@ pub trait Neg {
type Output;
/// Performs the unary `-` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn neg(self) -> Self::Output;
}

View File

@ -46,6 +46,7 @@ pub trait Not {
type Output;
/// Performs the unary `!` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn not(self) -> Self::Output;
}
@ -119,6 +120,7 @@ not_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// assert_eq!(bv1 & bv2, expected);
/// ```
#[lang = "bitand"]
#[doc(alias = "&")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} & {RHS}`",
label="no implementation for `{Self} & {RHS}`")]
@ -128,6 +130,7 @@ pub trait BitAnd<RHS=Self> {
type Output;
/// Performs the `&` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn bitand(self, rhs: RHS) -> Self::Output;
}
@ -201,6 +204,7 @@ bitand_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// assert_eq!(bv1 | bv2, expected);
/// ```
#[lang = "bitor"]
#[doc(alias = "|")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} | {RHS}`",
label="no implementation for `{Self} | {RHS}`")]
@ -210,6 +214,7 @@ pub trait BitOr<RHS=Self> {
type Output;
/// Performs the `|` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn bitor(self, rhs: RHS) -> Self::Output;
}
@ -286,6 +291,7 @@ bitor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// assert_eq!(bv1 ^ bv2, expected);
/// ```
#[lang = "bitxor"]
#[doc(alias = "^")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} ^ {RHS}`",
label="no implementation for `{Self} ^ {RHS}`")]
@ -295,6 +301,7 @@ pub trait BitXor<RHS=Self> {
type Output;
/// Performs the `^` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn bitxor(self, rhs: RHS) -> Self::Output;
}
@ -372,6 +379,7 @@ bitxor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// SpinVector { vec: vec![2, 3, 4, 0, 1] });
/// ```
#[lang = "shl"]
#[doc(alias = "<<")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} << {RHS}`",
label="no implementation for `{Self} << {RHS}`")]
@ -381,6 +389,7 @@ pub trait Shl<RHS=Self> {
type Output;
/// Performs the `<<` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn shl(self, rhs: RHS) -> Self::Output;
}
@ -479,6 +488,7 @@ shl_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 isize i128 }
/// SpinVector { vec: vec![3, 4, 0, 1, 2] });
/// ```
#[lang = "shr"]
#[doc(alias = ">>")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} >> {RHS}`",
label="no implementation for `{Self} >> {RHS}`")]
@ -488,6 +498,7 @@ pub trait Shr<RHS=Self> {
type Output;
/// Performs the `>>` operation.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn shr(self, rhs: RHS) -> Self::Output;
}
@ -593,6 +604,7 @@ shr_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }
/// assert_eq!(bv, expected);
/// ```
#[lang = "bitand_assign"]
#[doc(alias = "&=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} &= {Rhs}`",
label="no implementation for `{Self} &= {Rhs}`")]
@ -641,6 +653,7 @@ bitand_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// assert_eq!(prefs, PersonalPreferences { likes_cats: true, likes_dogs: true });
/// ```
#[lang = "bitor_assign"]
#[doc(alias = "|=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} |= {Rhs}`",
label="no implementation for `{Self} |= {Rhs}`")]
@ -689,6 +702,7 @@ bitor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// assert_eq!(personality, Personality { has_soul: true, likes_knitting: false});
/// ```
#[lang = "bitxor_assign"]
#[doc(alias = "^=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} ^= {Rhs}`",
label="no implementation for `{Self} ^= {Rhs}`")]
@ -735,6 +749,7 @@ bitxor_assign_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// assert_eq!(scalar, Scalar(16));
/// ```
#[lang = "shl_assign"]
#[doc(alias = "<<=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} <<= {Rhs}`",
label="no implementation for `{Self} <<= {Rhs}`")]
@ -802,6 +817,7 @@ shl_assign_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 i128 isize }
/// assert_eq!(scalar, Scalar(4));
/// ```
#[lang = "shr_assign"]
#[doc(alias = ">>=")]
#[stable(feature = "op_assign_traits", since = "1.8.0")]
#[rustc_on_unimplemented(message="no implementation for `{Self} >>= {Rhs}`",
label="no implementation for `{Self} >>= {Rhs}`")]

View File

@ -68,6 +68,8 @@
/// assert_eq!('a', *x);
/// ```
#[lang = "deref"]
#[doc(alias = "*")]
#[doc(alias = "&*")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait Deref {
/// The resulting type after dereferencing.
@ -75,6 +77,7 @@ pub trait Deref {
type Target: ?Sized;
/// Dereferences the value.
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn deref(&self) -> &Self::Target;
}
@ -162,6 +165,7 @@ impl<'a, T: ?Sized> Deref for &'a mut T {
/// assert_eq!('b', *x);
/// ```
#[lang = "deref_mut"]
#[doc(alias = "*")]
#[stable(feature = "rust1", since = "1.0.0")]
pub trait DerefMut: Deref {
/// Mutably dereferences the value.

View File

@ -45,6 +45,7 @@ use fmt;
/// [`IntoIterator`]: ../iter/trait.Iterator.html
/// [`Iterator`]: ../iter/trait.IntoIterator.html
/// [slicing index]: ../slice/trait.SliceIndex.html
#[doc(alias = "..")]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeFull;
@ -74,6 +75,7 @@ impl fmt::Debug for RangeFull {
/// assert_eq!(arr[1.. ], [ 'b', 'c', 'd']);
/// assert_eq!(arr[1..3], [ 'b', 'c' ]); // Range
/// ```
#[doc(alias = "..")]
#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Range<Idx> {
@ -175,6 +177,7 @@ impl<Idx: PartialOrd<Idx>> Range<Idx> {
/// ```
///
/// [`Iterator`]: ../iter/trait.IntoIterator.html
#[doc(alias = "..")]
#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeFrom<Idx> {
@ -256,6 +259,7 @@ impl<Idx: PartialOrd<Idx>> RangeFrom<Idx> {
/// [`IntoIterator`]: ../iter/trait.Iterator.html
/// [`Iterator`]: ../iter/trait.IntoIterator.html
/// [slicing index]: ../slice/trait.SliceIndex.html
#[doc(alias = "..")]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeTo<Idx> {
@ -314,26 +318,101 @@ impl<Idx: PartialOrd<Idx>> RangeTo<Idx> {
/// # Examples
///
/// ```
/// #![feature(inclusive_range_fields)]
/// #![feature(inclusive_range_methods)]
///
/// assert_eq!((3..=5), std::ops::RangeInclusive { start: 3, end: 5 });
/// assert_eq!((3..=5), std::ops::RangeInclusive::new(3, 5));
/// assert_eq!(3 + 4 + 5, (3..=5).sum());
///
/// let arr = [0, 1, 2, 3];
/// assert_eq!(arr[ ..=2], [0,1,2 ]);
/// assert_eq!(arr[1..=2], [ 1,2 ]); // RangeInclusive
/// ```
#[doc(alias = "..=")]
#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
#[stable(feature = "inclusive_range", since = "1.26.0")]
pub struct RangeInclusive<Idx> {
// FIXME: The current representation follows RFC 1980,
// but it is known that LLVM is not able to optimize loops following that RFC.
// Consider adding an extra `bool` field to indicate emptiness of the range.
// See #45222 for performance test cases.
#[cfg(not(stage0))]
pub(crate) start: Idx,
#[cfg(not(stage0))]
pub(crate) end: Idx,
/// The lower bound of the range (inclusive).
#[cfg(stage0)]
#[unstable(feature = "inclusive_range_fields", issue = "49022")]
pub start: Idx,
/// The upper bound of the range (inclusive).
#[cfg(stage0)]
#[unstable(feature = "inclusive_range_fields", issue = "49022")]
pub end: Idx,
}
impl<Idx> RangeInclusive<Idx> {
/// Creates a new inclusive range. Equivalent to writing `start..=end`.
///
/// # Examples
///
/// ```
/// #![feature(inclusive_range_methods)]
/// use std::ops::RangeInclusive;
///
/// assert_eq!(3..=5, RangeInclusive::new(3, 5));
/// ```
#[unstable(feature = "inclusive_range_methods", issue = "49022")]
#[inline]
pub const fn new(start: Idx, end: Idx) -> Self {
Self { start, end }
}
/// Returns the lower bound of the range (inclusive).
///
/// When using an inclusive range for iteration, the values of `start()` and
/// [`end()`] are unspecified after the iteration ended. To determine
/// whether the inclusive range is empty, use the [`is_empty()`] method
/// instead of comparing `start() > end()`.
///
/// [`end()`]: #method.end
/// [`is_empty()`]: #method.is_empty
///
/// # Examples
///
/// ```
/// #![feature(inclusive_range_methods)]
///
/// assert_eq!((3..=5).start(), &3);
/// ```
#[unstable(feature = "inclusive_range_methods", issue = "49022")]
#[inline]
pub fn start(&self) -> &Idx {
&self.start
}
/// Returns the upper bound of the range (inclusive).
///
/// When using an inclusive range for iteration, the values of [`start()`]
/// and `end()` are unspecified after the iteration ended. To determine
/// whether the inclusive range is empty, use the [`is_empty()`] method
/// instead of comparing `start() > end()`.
///
/// [`start()`]: #method.start
/// [`is_empty()`]: #method.is_empty
///
/// # Examples
///
/// ```
/// #![feature(inclusive_range_methods)]
///
/// assert_eq!((3..=5).end(), &5);
/// ```
#[unstable(feature = "inclusive_range_methods", issue = "49022")]
#[inline]
pub fn end(&self) -> &Idx {
&self.end
}
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: fmt::Debug> fmt::Debug for RangeInclusive<Idx> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
@ -449,6 +528,7 @@ impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
/// [`IntoIterator`]: ../iter/trait.Iterator.html
/// [`Iterator`]: ../iter/trait.IntoIterator.html
/// [slicing index]: ../slice/trait.SliceIndex.html
#[doc(alias = "..=")]
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
#[stable(feature = "inclusive_range", since = "1.26.0")]
pub struct RangeToInclusive<Idx> {

View File

@ -2513,6 +2513,7 @@ impl<T: ?Sized> PartialOrd for *mut T {
reason = "use NonNull instead and consider PhantomData<T> \
(if you also use #[may_dangle]), Send, and/or Sync")]
#[allow(deprecated)]
#[doc(hidden)]
pub struct Unique<T: ?Sized> {
pointer: NonZero<*const T>,
// NOTE: this marker has no consequences for variance, but is necessary
@ -2551,10 +2552,9 @@ impl<T: Sized> Unique<T> {
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
// FIXME: rename to dangling() to match NonNull?
pub fn empty() -> Self {
pub const fn empty() -> Self {
unsafe {
let ptr = mem::align_of::<T>() as *mut T;
Unique::new_unchecked(ptr)
Unique::new_unchecked(mem::align_of::<T>() as *mut T)
}
}
}

View File

@ -39,21 +39,10 @@ fn repeat_byte(b: u8) -> usize {
(b as usize) << 8 | b as usize
}
#[cfg(target_pointer_width = "32")]
#[cfg(not(target_pointer_width = "16"))]
#[inline]
fn repeat_byte(b: u8) -> usize {
let mut rep = (b as usize) << 8 | b as usize;
rep = rep << 16 | rep;
rep
}
#[cfg(target_pointer_width = "64")]
#[inline]
fn repeat_byte(b: u8) -> usize {
let mut rep = (b as usize) << 8 | b as usize;
rep = rep << 16 | rep;
rep = rep << 32 | rep;
rep
(b as usize) * (::usize::MAX / 255)
}
/// Return the first index matching the byte `x` in `text`.
@ -146,85 +135,3 @@ pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> {
// find the byte before the point the body loop stopped
text[..offset].iter().rposition(|elt| *elt == x)
}
// test fallback implementations on all platforms
#[test]
fn matches_one() {
assert_eq!(Some(0), memchr(b'a', b"a"));
}
#[test]
fn matches_begin() {
assert_eq!(Some(0), memchr(b'a', b"aaaa"));
}
#[test]
fn matches_end() {
assert_eq!(Some(4), memchr(b'z', b"aaaaz"));
}
#[test]
fn matches_nul() {
assert_eq!(Some(4), memchr(b'\x00', b"aaaa\x00"));
}
#[test]
fn matches_past_nul() {
assert_eq!(Some(5), memchr(b'z', b"aaaa\x00z"));
}
#[test]
fn no_match_empty() {
assert_eq!(None, memchr(b'a', b""));
}
#[test]
fn no_match() {
assert_eq!(None, memchr(b'a', b"xyz"));
}
#[test]
fn matches_one_reversed() {
assert_eq!(Some(0), memrchr(b'a', b"a"));
}
#[test]
fn matches_begin_reversed() {
assert_eq!(Some(3), memrchr(b'a', b"aaaa"));
}
#[test]
fn matches_end_reversed() {
assert_eq!(Some(0), memrchr(b'z', b"zaaaa"));
}
#[test]
fn matches_nul_reversed() {
assert_eq!(Some(4), memrchr(b'\x00', b"aaaa\x00"));
}
#[test]
fn matches_past_nul_reversed() {
assert_eq!(Some(0), memrchr(b'z', b"z\x00aaaa"));
}
#[test]
fn no_match_empty_reversed() {
assert_eq!(None, memrchr(b'a', b""));
}
#[test]
fn no_match_reversed() {
assert_eq!(None, memrchr(b'a', b"xyz"));
}
#[test]
fn each_alignment_reversed() {
let mut data = [1u8; 64];
let needle = 2;
let pos = 40;
data[pos] = needle;
for start in 0..16 {
assert_eq!(Some(pos - start), memrchr(needle, &data[start..]));
}
}

View File

@ -880,7 +880,6 @@ macro_rules! slice_core_methods { () => {
#[inline]
pub fn split_last(&self) -> Option<(&T, &[T])> {
SliceExt::split_last(self)
}
/// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.

View File

@ -44,9 +44,10 @@
#![feature(exact_chunks)]
#![cfg_attr(stage0, feature(atomic_nand))]
#![feature(reverse_bits)]
#![feature(inclusive_range_fields)]
#![feature(inclusive_range_methods)]
#![feature(iterator_find_map)]
#![feature(inner_deref)]
#![feature(slice_internals)]
extern crate core;
extern crate test;
@ -75,4 +76,5 @@ mod result;
mod slice;
mod str;
mod str_lossy;
mod time;
mod tuple;

View File

@ -98,6 +98,7 @@ mod tests {
}
#[test]
#[cfg(not(stage0))]
fn test_reverse_bits() {
assert_eq!(A.reverse_bits().reverse_bits(), A);
assert_eq!(B.reverse_bits().reverse_bits(), B);

View File

@ -50,21 +50,21 @@ fn test_full_range() {
#[test]
fn test_range_inclusive() {
let mut r = RangeInclusive { start: 1i8, end: 2 };
let mut r = RangeInclusive::new(1i8, 2);
assert_eq!(r.next(), Some(1));
assert_eq!(r.next(), Some(2));
assert_eq!(r.next(), None);
r = RangeInclusive { start: 127i8, end: 127 };
r = RangeInclusive::new(127i8, 127);
assert_eq!(r.next(), Some(127));
assert_eq!(r.next(), None);
r = RangeInclusive { start: -128i8, end: -128 };
r = RangeInclusive::new(-128i8, -128);
assert_eq!(r.next_back(), Some(-128));
assert_eq!(r.next_back(), None);
// degenerate
r = RangeInclusive { start: 1, end: -1 };
r = RangeInclusive::new(1, -1);
assert_eq!(r.size_hint(), (0, Some(0)));
assert_eq!(r.next(), None);
}

View File

@ -550,3 +550,89 @@ fn sort_unstable() {
v.sort_unstable();
assert!(v == [0xDEADBEEF]);
}
pub mod memchr {
use core::slice::memchr::{memchr, memrchr};
// test fallback implementations on all platforms
#[test]
fn matches_one() {
assert_eq!(Some(0), memchr(b'a', b"a"));
}
#[test]
fn matches_begin() {
assert_eq!(Some(0), memchr(b'a', b"aaaa"));
}
#[test]
fn matches_end() {
assert_eq!(Some(4), memchr(b'z', b"aaaaz"));
}
#[test]
fn matches_nul() {
assert_eq!(Some(4), memchr(b'\x00', b"aaaa\x00"));
}
#[test]
fn matches_past_nul() {
assert_eq!(Some(5), memchr(b'z', b"aaaa\x00z"));
}
#[test]
fn no_match_empty() {
assert_eq!(None, memchr(b'a', b""));
}
#[test]
fn no_match() {
assert_eq!(None, memchr(b'a', b"xyz"));
}
#[test]
fn matches_one_reversed() {
assert_eq!(Some(0), memrchr(b'a', b"a"));
}
#[test]
fn matches_begin_reversed() {
assert_eq!(Some(3), memrchr(b'a', b"aaaa"));
}
#[test]
fn matches_end_reversed() {
assert_eq!(Some(0), memrchr(b'z', b"zaaaa"));
}
#[test]
fn matches_nul_reversed() {
assert_eq!(Some(4), memrchr(b'\x00', b"aaaa\x00"));
}
#[test]
fn matches_past_nul_reversed() {
assert_eq!(Some(0), memrchr(b'z', b"z\x00aaaa"));
}
#[test]
fn no_match_empty_reversed() {
assert_eq!(None, memrchr(b'a', b""));
}
#[test]
fn no_match_reversed() {
assert_eq!(None, memrchr(b'a', b"xyz"));
}
#[test]
fn each_alignment_reversed() {
let mut data = [1u8; 64];
let needle = 2;
let pos = 40;
data[pos] = needle;
for start in 0..16 {
assert_eq!(Some(pos - start), memrchr(needle, &data[start..]));
}
}
}

124
src/libcore/tests/time.rs Normal file
View File

@ -0,0 +1,124 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::time::Duration;
#[test]
fn creation() {
assert!(Duration::from_secs(1) != Duration::from_secs(0));
assert_eq!(Duration::from_secs(1) + Duration::from_secs(2),
Duration::from_secs(3));
assert_eq!(Duration::from_millis(10) + Duration::from_secs(4),
Duration::new(4, 10 * 1_000_000));
assert_eq!(Duration::from_millis(4000), Duration::new(4, 0));
}
#[test]
fn secs() {
assert_eq!(Duration::new(0, 0).as_secs(), 0);
assert_eq!(Duration::from_secs(1).as_secs(), 1);
assert_eq!(Duration::from_millis(999).as_secs(), 0);
assert_eq!(Duration::from_millis(1001).as_secs(), 1);
}
#[test]
fn nanos() {
assert_eq!(Duration::new(0, 0).subsec_nanos(), 0);
assert_eq!(Duration::new(0, 5).subsec_nanos(), 5);
assert_eq!(Duration::new(0, 1_000_000_001).subsec_nanos(), 1);
assert_eq!(Duration::from_secs(1).subsec_nanos(), 0);
assert_eq!(Duration::from_millis(999).subsec_nanos(), 999 * 1_000_000);
assert_eq!(Duration::from_millis(1001).subsec_nanos(), 1 * 1_000_000);
}
#[test]
fn add() {
assert_eq!(Duration::new(0, 0) + Duration::new(0, 1),
Duration::new(0, 1));
assert_eq!(Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001),
Duration::new(1, 1));
}
#[test]
fn checked_add() {
assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)),
Some(Duration::new(0, 1)));
assert_eq!(Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)),
Some(Duration::new(1, 1)));
assert_eq!(Duration::new(1, 0).checked_add(Duration::new(::core::u64::MAX, 0)), None);
}
#[test]
fn sub() {
assert_eq!(Duration::new(0, 1) - Duration::new(0, 0),
Duration::new(0, 1));
assert_eq!(Duration::new(0, 500_000_001) - Duration::new(0, 500_000_000),
Duration::new(0, 1));
assert_eq!(Duration::new(1, 0) - Duration::new(0, 1),
Duration::new(0, 999_999_999));
}
#[test]
fn checked_sub() {
let zero = Duration::new(0, 0);
let one_nano = Duration::new(0, 1);
let one_sec = Duration::new(1, 0);
assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1)));
assert_eq!(one_sec.checked_sub(one_nano),
Some(Duration::new(0, 999_999_999)));
assert_eq!(zero.checked_sub(one_nano), None);
assert_eq!(zero.checked_sub(one_sec), None);
}
#[test]
#[should_panic]
fn sub_bad1() {
let _ = Duration::new(0, 0) - Duration::new(0, 1);
}
#[test]
#[should_panic]
fn sub_bad2() {
let _ = Duration::new(0, 0) - Duration::new(1, 0);
}
#[test]
fn mul() {
assert_eq!(Duration::new(0, 1) * 2, Duration::new(0, 2));
assert_eq!(Duration::new(1, 1) * 3, Duration::new(3, 3));
assert_eq!(Duration::new(0, 500_000_001) * 4, Duration::new(2, 4));
assert_eq!(Duration::new(0, 500_000_001) * 4000,
Duration::new(2000, 4000));
}
#[test]
fn checked_mul() {
assert_eq!(Duration::new(0, 1).checked_mul(2), Some(Duration::new(0, 2)));
assert_eq!(Duration::new(1, 1).checked_mul(3), Some(Duration::new(3, 3)));
assert_eq!(Duration::new(0, 500_000_001).checked_mul(4), Some(Duration::new(2, 4)));
assert_eq!(Duration::new(0, 500_000_001).checked_mul(4000),
Some(Duration::new(2000, 4000)));
assert_eq!(Duration::new(::core::u64::MAX - 1, 0).checked_mul(2), None);
}
#[test]
fn div() {
assert_eq!(Duration::new(0, 1) / 2, Duration::new(0, 0));
assert_eq!(Duration::new(1, 1) / 3, Duration::new(0, 333_333_333));
assert_eq!(Duration::new(99, 999_999_000) / 100,
Duration::new(0, 999_999_990));
}
#[test]
fn checked_div() {
assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000)));
assert_eq!(Duration::new(2, 0).checked_div(0), None);
}

View File

@ -481,119 +481,3 @@ impl<'a> Sum<&'a Duration> for Duration {
iter.fold(Duration::new(0, 0), |a, b| a + *b)
}
}
#[cfg(test)]
mod tests {
use super::Duration;
#[test]
fn creation() {
assert!(Duration::from_secs(1) != Duration::from_secs(0));
assert_eq!(Duration::from_secs(1) + Duration::from_secs(2),
Duration::from_secs(3));
assert_eq!(Duration::from_millis(10) + Duration::from_secs(4),
Duration::new(4, 10 * 1_000_000));
assert_eq!(Duration::from_millis(4000), Duration::new(4, 0));
}
#[test]
fn secs() {
assert_eq!(Duration::new(0, 0).as_secs(), 0);
assert_eq!(Duration::from_secs(1).as_secs(), 1);
assert_eq!(Duration::from_millis(999).as_secs(), 0);
assert_eq!(Duration::from_millis(1001).as_secs(), 1);
}
#[test]
fn nanos() {
assert_eq!(Duration::new(0, 0).subsec_nanos(), 0);
assert_eq!(Duration::new(0, 5).subsec_nanos(), 5);
assert_eq!(Duration::new(0, 1_000_000_001).subsec_nanos(), 1);
assert_eq!(Duration::from_secs(1).subsec_nanos(), 0);
assert_eq!(Duration::from_millis(999).subsec_nanos(), 999 * 1_000_000);
assert_eq!(Duration::from_millis(1001).subsec_nanos(), 1 * 1_000_000);
}
#[test]
fn add() {
assert_eq!(Duration::new(0, 0) + Duration::new(0, 1),
Duration::new(0, 1));
assert_eq!(Duration::new(0, 500_000_000) + Duration::new(0, 500_000_001),
Duration::new(1, 1));
}
#[test]
fn checked_add() {
assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)),
Some(Duration::new(0, 1)));
assert_eq!(Duration::new(0, 500_000_000).checked_add(Duration::new(0, 500_000_001)),
Some(Duration::new(1, 1)));
assert_eq!(Duration::new(1, 0).checked_add(Duration::new(::u64::MAX, 0)), None);
}
#[test]
fn sub() {
assert_eq!(Duration::new(0, 1) - Duration::new(0, 0),
Duration::new(0, 1));
assert_eq!(Duration::new(0, 500_000_001) - Duration::new(0, 500_000_000),
Duration::new(0, 1));
assert_eq!(Duration::new(1, 0) - Duration::new(0, 1),
Duration::new(0, 999_999_999));
}
#[test]
fn checked_sub() {
let zero = Duration::new(0, 0);
let one_nano = Duration::new(0, 1);
let one_sec = Duration::new(1, 0);
assert_eq!(one_nano.checked_sub(zero), Some(Duration::new(0, 1)));
assert_eq!(one_sec.checked_sub(one_nano),
Some(Duration::new(0, 999_999_999)));
assert_eq!(zero.checked_sub(one_nano), None);
assert_eq!(zero.checked_sub(one_sec), None);
}
#[test] #[should_panic]
fn sub_bad1() {
Duration::new(0, 0) - Duration::new(0, 1);
}
#[test] #[should_panic]
fn sub_bad2() {
Duration::new(0, 0) - Duration::new(1, 0);
}
#[test]
fn mul() {
assert_eq!(Duration::new(0, 1) * 2, Duration::new(0, 2));
assert_eq!(Duration::new(1, 1) * 3, Duration::new(3, 3));
assert_eq!(Duration::new(0, 500_000_001) * 4, Duration::new(2, 4));
assert_eq!(Duration::new(0, 500_000_001) * 4000,
Duration::new(2000, 4000));
}
#[test]
fn checked_mul() {
assert_eq!(Duration::new(0, 1).checked_mul(2), Some(Duration::new(0, 2)));
assert_eq!(Duration::new(1, 1).checked_mul(3), Some(Duration::new(3, 3)));
assert_eq!(Duration::new(0, 500_000_001).checked_mul(4), Some(Duration::new(2, 4)));
assert_eq!(Duration::new(0, 500_000_001).checked_mul(4000),
Some(Duration::new(2000, 4000)));
assert_eq!(Duration::new(::u64::MAX - 1, 0).checked_mul(2), None);
}
#[test]
fn div() {
assert_eq!(Duration::new(0, 1) / 2, Duration::new(0, 0));
assert_eq!(Duration::new(1, 1) / 3, Duration::new(0, 333_333_333));
assert_eq!(Duration::new(99, 999_999_000) / 100,
Duration::new(0, 999_999_990));
}
#[test]
fn checked_div() {
assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000)));
assert_eq!(Duration::new(2, 0).checked_div(0), None);
}
}

View File

@ -42,15 +42,15 @@ pub struct BoolTrie {
}
impl BoolTrie {
pub fn lookup(&self, c: char) -> bool {
let c = c as usize;
let c = c as u32;
if c < 0x800 {
trie_range_leaf(c, self.r1[c >> 6])
trie_range_leaf(c, self.r1[(c >> 6) as usize])
} else if c < 0x10000 {
let child = self.r2[(c >> 6) - 0x20];
let child = self.r2[(c >> 6) as usize - 0x20];
trie_range_leaf(c, self.r3[child as usize])
} else {
let child = self.r4[(c >> 12) - 0x10];
let leaf = self.r5[((child as usize) << 6) + ((c >> 6) & 0x3f)];
let child = self.r4[(c >> 12) as usize - 0x10];
let leaf = self.r5[((child as usize) << 6) + ((c >> 6) as usize & 0x3f)];
trie_range_leaf(c, self.r6[leaf as usize])
}
}
@ -63,14 +63,14 @@ pub struct SmallBoolTrie {
impl SmallBoolTrie {
pub fn lookup(&self, c: char) -> bool {
let c = c as usize;
match self.r1.get(c >> 6) {
let c = c as u32;
match self.r1.get((c >> 6) as usize) {
Some(&child) => trie_range_leaf(c, self.r2[child as usize]),
None => false,
}
}
}
fn trie_range_leaf(c: usize, bitmap_chunk: u64) -> bool {
fn trie_range_leaf(c: u32, bitmap_chunk: u64) -> bool {
((bitmap_chunk >> (c & 63)) & 1) != 0
}

View File

@ -74,6 +74,11 @@ use syntax_pos::hygiene::Mark;
#[derive(Clone)]
pub struct TokenStream(tokenstream::TokenStream);
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for TokenStream {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for TokenStream {}
/// Error returned from `TokenStream::from_str`.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Debug)]
@ -81,6 +86,11 @@ pub struct LexError {
_inner: (),
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for LexError {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for LexError {}
impl TokenStream {
/// Returns an empty `TokenStream`.
#[unstable(feature = "proc_macro", issue = "38356")]
@ -231,6 +241,11 @@ pub fn quote_span(span: Span) -> TokenStream {
#[derive(Copy, Clone)]
pub struct Span(syntax_pos::Span);
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Span {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Span {}
macro_rules! diagnostic_method {
($name:ident, $level:expr) => (
/// Create a new `Diagnostic` with the given `message` at the span
@ -270,7 +285,7 @@ impl Span {
/// `self` was generated from, if any.
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn parent(&self) -> Option<Span> {
self.0.ctxt().outer().expn_info().map(|i| Span(i.call_site))
self.0.parent().map(Span)
}
/// The span for the origin source code that `self` was generated from. If
@ -363,6 +378,11 @@ pub struct LineColumn {
pub column: usize
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for LineColumn {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for LineColumn {}
/// The source file of a given `Span`.
#[unstable(feature = "proc_macro", issue = "38356")]
#[derive(Clone)]
@ -393,7 +413,7 @@ impl SourceFile {
/// Returns `true` if this source file is a real source file, and not generated by an external
/// macro's expansion.
# [unstable(feature = "proc_macro", issue = "38356")]
#[unstable(feature = "proc_macro", issue = "38356")]
pub fn is_real(&self) -> bool {
// This is a hack until intercrate spans are implemented and we can have real source files
// for spans generated in external macros.
@ -450,6 +470,11 @@ pub enum TokenTree {
Literal(Literal),
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for TokenTree {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for TokenTree {}
impl TokenTree {
/// Returns the span of this token, accessing the `span` method of each of
/// the internal tokens.
@ -546,6 +571,11 @@ pub struct Group {
span: Span,
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Group {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Group {}
/// Describes how a sequence of token trees is delimited.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
@ -628,6 +658,11 @@ pub struct Op {
span: Span,
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Op {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Op {}
/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[unstable(feature = "proc_macro", issue = "38356")]
@ -694,6 +729,11 @@ pub struct Term {
span: Span,
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Term {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Term {}
impl Term {
/// Creates a new `Term` with the given `string` as well as the specified
/// `span`.
@ -752,6 +792,11 @@ pub struct Literal {
span: Span,
}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Send for Literal {}
#[unstable(feature = "proc_macro", issue = "38356")]
impl !Sync for Literal {}
macro_rules! suffixed_int_literals {
($($name:ident => $kind:ident,)*) => ($(
/// Creates a new suffixed integer literal with the specified value.

View File

@ -19,7 +19,6 @@ log = { version = "0.4", features = ["release_max_level_info", "std"] }
proc_macro = { path = "../libproc_macro" }
rustc_apfloat = { path = "../librustc_apfloat" }
rustc_target = { path = "../librustc_target" }
rustc_const_math = { path = "../librustc_const_math" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_errors = { path = "../librustc_errors" }
serialize = { path = "../libserialize" }

View File

@ -70,7 +70,8 @@ use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
use std::fmt;
use std::hash::Hash;
use syntax_pos::symbol::InternedString;
use traits::query::{CanonicalProjectionGoal, CanonicalTyGoal};
use traits::query::{CanonicalProjectionGoal,
CanonicalTyGoal, CanonicalPredicateGoal};
use ty::{TyCtxt, Instance, InstanceDef, ParamEnv, ParamEnvAnd, PolyTraitRef, Ty};
use ty::subst::Substs;
@ -643,6 +644,7 @@ define_dep_nodes!( <'tcx>
[] NormalizeProjectionTy(CanonicalProjectionGoal<'tcx>),
[] NormalizeTyAfterErasingRegions(ParamEnvAnd<'tcx, Ty<'tcx>>),
[] DropckOutlives(CanonicalTyGoal<'tcx>),
[] EvaluateObligation(CanonicalPredicateGoal<'tcx>),
[] SubstituteNormalizeAndTestPredicates { key: (DefId, &'tcx Substs<'tcx>) },


@ -30,6 +30,8 @@ enum Target {
ForeignMod,
Expression,
Statement,
Closure,
Static,
Other,
}
@ -42,6 +44,7 @@ impl Target {
hir::ItemEnum(..) => Target::Enum,
hir::ItemConst(..) => Target::Const,
hir::ItemForeignMod(..) => Target::ForeignMod,
hir::ItemStatic(..) => Target::Static,
_ => Target::Other,
}
}
@ -101,16 +104,17 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
}
self.check_repr(item, target);
self.check_used(item, target);
}
/// Check if an `#[inline]` is applied to a function.
/// Check if an `#[inline]` is applied to a function or a closure.
fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) {
if target != Target::Fn {
if target != Target::Fn && target != Target::Closure {
struct_span_err!(self.tcx.sess,
attr.span,
E0518,
"attribute should be applied to function")
.span_label(*span, "not a function")
"attribute should be applied to function or closure")
.span_label(*span, "not a function or closure")
.emit();
}
}
@ -149,10 +153,7 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
// ```
let hints: Vec<_> = item.attrs
.iter()
.filter(|attr| match attr.name() {
Some(name) => name == "repr",
None => false,
})
.filter(|attr| attr.name() == "repr")
.filter_map(|attr| attr.meta_item_list())
.flat_map(|hints| hints)
.collect();
@ -286,9 +287,13 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
}
fn check_expr_attributes(&self, expr: &hir::Expr) {
let target = match expr.node {
hir::ExprClosure(..) => Target::Closure,
_ => Target::Expression,
};
for attr in expr.attrs.iter() {
if attr.check_name("inline") {
self.check_inline(attr, &expr.span, Target::Expression);
self.check_inline(attr, &expr.span, target);
}
if attr.check_name("repr") {
self.emit_repr_error(
@ -300,6 +305,15 @@ impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
}
}
}
fn check_used(&self, item: &hir::Item, target: Target) {
for attr in &item.attrs {
if attr.name() == "used" && target != Target::Static {
self.tcx.sess
.span_err(attr.span, "attribute must be applied to a `static` variable");
}
}
}
}
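A quick sketch of what the new `check_used` accepts and rejects; this is ordinary user code, not compiler code, and `KEEP_IN_BINARY` is just an illustrative name (`#[used]` itself was still feature-gated when this change landed):

#[used]
static KEEP_IN_BINARY: u32 = 0x1234_5678;   // accepted: the attribute sits on a `static`

// #[used]
// fn not_a_static() {}   // would be rejected: "attribute must be applied to a `static` variable"

fn main() {}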
impl<'a, 'tcx> Visitor<'tcx> for CheckAttrVisitor<'a, 'tcx> {


@ -404,7 +404,7 @@ pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) {
// Intentionally visiting the expr first - the initialization expr
// dominates the local's definition.
walk_list!(visitor, visit_expr, &local.init);
walk_list!(visitor, visit_attribute, local.attrs.iter());
visitor.visit_id(local.id);
visitor.visit_pat(&local.pat);
walk_list!(visitor, visit_ty, &local.ty);
@ -731,6 +731,7 @@ pub fn walk_generic_param<'v, V: Visitor<'v>>(visitor: &mut V, param: &'v Generi
visitor.visit_name(ty_param.span, ty_param.name);
walk_list!(visitor, visit_ty_param_bound, &ty_param.bounds);
walk_list!(visitor, visit_ty, &ty_param.default);
walk_list!(visitor, visit_attribute, ty_param.attrs.iter());
}
}
}


@ -655,7 +655,7 @@ impl<'a> LoweringContext<'a> {
self.resolver.definitions().create_def_with_parent(
parent_id.index,
def_node_id,
DefPathData::LifetimeDef(str_name),
DefPathData::LifetimeDef(str_name.as_interned_str()),
DefIndexAddressSpace::High,
Mark::root(),
span,
@ -1302,7 +1302,7 @@ impl<'a> LoweringContext<'a> {
self.context.resolver.definitions().create_def_with_parent(
self.parent,
def_node_id,
DefPathData::LifetimeDef(name.name().as_str()),
DefPathData::LifetimeDef(name.name().as_interned_str()),
DefIndexAddressSpace::High,
Mark::root(),
lifetime.span,
@ -3119,6 +3119,20 @@ impl<'a> LoweringContext<'a> {
ExprKind::Index(ref el, ref er) => {
hir::ExprIndex(P(self.lower_expr(el)), P(self.lower_expr(er)))
}
// Desugar `<start>..=<end>` to `std::ops::RangeInclusive::new(<start>, <end>)`
ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
// FIXME: Use e.span directly after RangeInclusive::new() is stabilized in stage0.
let span = self.allow_internal_unstable(CompilerDesugaringKind::DotFill, e.span);
let id = self.next_id();
let e1 = self.lower_expr(e1);
let e2 = self.lower_expr(e2);
let ty_path = P(self.std_path(span, &["ops", "RangeInclusive"], false));
let ty = self.ty_path(id, span, hir::QPath::Resolved(None, ty_path));
let new_seg = P(hir::PathSegment::from_name(Symbol::intern("new")));
let new_path = hir::QPath::TypeRelative(ty, new_seg);
let new = P(self.expr(span, hir::ExprPath(new_path), ThinVec::new()));
hir::ExprCall(new, hir_vec![e1, e2])
}
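For reference, a surface-level sketch (not compiler code) of what the new arm above produces; the concrete literals are only illustrative:

fn main() {
    // `1..=5` is now lowered to a call like the following:
    let desugared = std::ops::RangeInclusive::new(1, 5);
    assert_eq!(desugared, 1..=5);
    assert_eq!((1..=5).sum::<i32>(), 15);
}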
ExprKind::Range(ref e1, ref e2, lims) => {
use syntax::ast::RangeLimits::*;
@ -3128,7 +3142,7 @@ impl<'a> LoweringContext<'a> {
(&None, &Some(..), HalfOpen) => "RangeTo",
(&Some(..), &Some(..), HalfOpen) => "Range",
(&None, &Some(..), Closed) => "RangeToInclusive",
(&Some(..), &Some(..), Closed) => "RangeInclusive",
(&Some(..), &Some(..), Closed) => unreachable!(),
(_, &None, Closed) => self.diagnostic()
.span_fatal(e.span, "inclusive range with no end")
.raise(),
@ -4107,15 +4121,13 @@ impl<'a> LoweringContext<'a> {
}
fn maybe_lint_bare_trait(&self, span: Span, id: NodeId, is_global: bool) {
if self.sess.features_untracked().dyn_trait {
self.sess.buffer_lint_with_diagnostic(
builtin::BARE_TRAIT_OBJECT,
id,
span,
"trait objects without an explicit `dyn` are deprecated",
builtin::BuiltinLintDiagnostics::BareTraitObject(span, is_global),
)
}
self.sess.buffer_lint_with_diagnostic(
builtin::BARE_TRAIT_OBJECT,
id,
span,
"trait objects without an explicit `dyn` are deprecated",
builtin::BuiltinLintDiagnostics::BareTraitObject(span, is_global),
)
}
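To illustrate what the (now unconditionally emitted) lint above is about, here is ordinary user code with a made-up `Draw` trait; the bare form is shown commented out so the sketch stays warning-free:

trait Draw { fn draw(&self); }

// fn deprecated(d: &Draw) { d.draw(); }   // bare trait object: the lint suggests `dyn Draw`
fn preferred(d: &dyn Draw) { d.draw(); }   // explicit `dyn` spelling

struct Dot;
impl Draw for Dot { fn draw(&self) {} }

fn main() { preferred(&Dot); }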
fn wrap_in_try_constructor(


@ -107,18 +107,18 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
// information we encapsulate into
let def_data = match i.node {
ItemKind::Impl(..) => DefPathData::Impl,
ItemKind::Trait(..) => DefPathData::Trait(i.ident.name.as_str()),
ItemKind::Trait(..) => DefPathData::Trait(i.ident.name.as_interned_str()),
ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..) |
ItemKind::TraitAlias(..) |
ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) =>
DefPathData::TypeNs(i.ident.name.as_str()),
DefPathData::TypeNs(i.ident.name.as_interned_str()),
ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => {
return visit::walk_item(self, i);
}
ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()),
ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_interned_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
DefPathData::ValueNs(i.ident.name.as_str()),
ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_str()),
DefPathData::ValueNs(i.ident.name.as_interned_str()),
ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_interned_str()),
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false),
ItemKind::GlobalAsm(..) => DefPathData::Misc,
ItemKind::Use(..) => {
@ -133,7 +133,8 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
for v in &enum_definition.variants {
let variant_def_index =
this.create_def(v.node.data.id(),
DefPathData::EnumVariant(v.node.ident.name.as_str()),
DefPathData::EnumVariant(v.node.ident
.name.as_interned_str()),
REGULAR_SPACE,
v.span);
this.with_parent(variant_def_index, |this| {
@ -141,7 +142,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
let name = field.ident.map(|ident| ident.name)
.unwrap_or_else(|| Symbol::intern(&index.to_string()));
this.create_def(field.id,
DefPathData::Field(name.as_str()),
DefPathData::Field(name.as_interned_str()),
REGULAR_SPACE,
field.span);
}
@ -165,7 +166,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
let name = field.ident.map(|ident| ident.name)
.unwrap_or_else(|| Symbol::intern(&index.to_string()));
this.create_def(field.id,
DefPathData::Field(name.as_str()),
DefPathData::Field(name.as_interned_str()),
REGULAR_SPACE,
field.span);
}
@ -187,7 +188,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
}
let def = self.create_def(foreign_item.id,
DefPathData::ValueNs(foreign_item.ident.name.as_str()),
DefPathData::ValueNs(foreign_item.ident.name.as_interned_str()),
REGULAR_SPACE,
foreign_item.span);
@ -201,7 +202,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
GenericParam::Lifetime(ref lifetime_def) => {
self.create_def(
lifetime_def.lifetime.id,
DefPathData::LifetimeDef(lifetime_def.lifetime.ident.name.as_str()),
DefPathData::LifetimeDef(lifetime_def.lifetime.ident.name.as_interned_str()),
REGULAR_SPACE,
lifetime_def.lifetime.ident.span
);
@ -209,7 +210,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
GenericParam::Type(ref ty_param) => {
self.create_def(
ty_param.id,
DefPathData::TypeParam(ty_param.ident.name.as_str()),
DefPathData::TypeParam(ty_param.ident.name.as_interned_str()),
REGULAR_SPACE,
ty_param.ident.span
);
@ -222,8 +223,10 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
let def_data = match ti.node {
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
DefPathData::ValueNs(ti.ident.name.as_str()),
TraitItemKind::Type(..) => DefPathData::AssocTypeInTrait(ti.ident.name.as_str()),
DefPathData::ValueNs(ti.ident.name.as_interned_str()),
TraitItemKind::Type(..) => {
DefPathData::AssocTypeInTrait(ti.ident.name.as_interned_str())
},
TraitItemKind::Macro(..) => return self.visit_macro_invoc(ti.id, false),
};
@ -240,8 +243,8 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
fn visit_impl_item(&mut self, ii: &'a ImplItem) {
let def_data = match ii.node {
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
DefPathData::ValueNs(ii.ident.name.as_str()),
ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.name.as_str()),
DefPathData::ValueNs(ii.ident.name.as_interned_str()),
ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.name.as_interned_str()),
ImplItemKind::Macro(..) => return self.visit_macro_invoc(ii.id, false),
};


@ -701,7 +701,7 @@ impl DefPathData {
Typeof => "{{typeof}}",
};
Symbol::intern(s).as_str()
Symbol::intern(s).as_interned_str()
}
pub fn to_string(&self) -> String {
@ -731,7 +731,7 @@ macro_rules! define_global_metadata_kind {
definitions.create_def_with_parent(
CRATE_DEF_INDEX,
ast::DUMMY_NODE_ID,
DefPathData::GlobalMetaData(instance.name().as_str()),
DefPathData::GlobalMetaData(instance.name().as_interned_str()),
GLOBAL_MD_ADDRESS_SPACE,
Mark::root(),
DUMMY_SP
@ -746,7 +746,7 @@ macro_rules! define_global_metadata_kind {
let def_key = DefKey {
parent: Some(CRATE_DEF_INDEX),
disambiguated_data: DisambiguatedDefPathData {
data: DefPathData::GlobalMetaData(self.name().as_str()),
data: DefPathData::GlobalMetaData(self.name().as_interned_str()),
disambiguator: 0,
}
};


@ -1,44 +0,0 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module contains `HashStable` implementations for various data types
//! from `rustc_const_math` in no particular order.
impl_stable_hash_for!(struct ::rustc_const_math::ConstFloat {
ty,
bits
});
impl_stable_hash_for!(enum ::rustc_const_math::ConstMathErr {
NotInRange,
CmpBetweenUnequalTypes,
UnequalTypes(op),
Overflow(op),
ShiftNegative,
DivisionByZero,
RemainderByZero,
UnsignedNegation,
ULitOutOfRange(int_ty),
LitOutOfRange(int_ty)
});
impl_stable_hash_for!(enum ::rustc_const_math::Op {
Add,
Sub,
Mul,
Div,
Rem,
Shr,
Shl,
Neg,
BitAnd,
BitOr,
BitXor
});


@ -227,27 +227,6 @@ for mir::TerminatorKind<'gcx> {
}
}
impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
for mir::AssertMessage<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
mir::AssertMessage::BoundsCheck { ref len, ref index } => {
len.hash_stable(hcx, hasher);
index.hash_stable(hcx, hasher);
}
mir::AssertMessage::Math(ref const_math_err) => {
const_math_err.hash_stable(hcx, hasher);
}
mir::AssertMessage::GeneratorResumedAfterReturn => (),
mir::AssertMessage::GeneratorResumedAfterPanic => (),
}
}
}
impl_stable_hash_for!(struct mir::Statement<'tcx> { source_info, kind });
impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
@ -563,6 +542,11 @@ impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for mir::Literal<'gcx> {
impl_stable_hash_for!(struct mir::Location { block, statement_index });
impl_stable_hash_for!(struct mir::BorrowCheckResult<'tcx> {
closure_requirements,
used_mut_upvars
});
impl_stable_hash_for!(struct mir::ClosureRegionRequirements<'tcx> {
num_external_vids,
outlives_requirements


@ -19,7 +19,7 @@ use std::mem;
use syntax::ast;
use syntax::feature_gate;
use syntax::parse::token;
use syntax::symbol::InternedString;
use syntax::symbol::{InternedString, LocalInternedString};
use syntax::tokenstream;
use syntax_pos::FileMap;
@ -34,8 +34,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for InternedString {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
let s: &str = &**self;
s.hash_stable(hcx, hasher);
self.with(|s| s.hash_stable(hcx, hasher))
}
}
@ -50,6 +49,27 @@ impl<'a> ToStableHashKey<StableHashingContext<'a>> for InternedString {
}
}
impl<'a> HashStable<StableHashingContext<'a>> for LocalInternedString {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
let s: &str = &**self;
s.hash_stable(hcx, hasher);
}
}
impl<'a> ToStableHashKey<StableHashingContext<'a>> for LocalInternedString {
type KeyType = LocalInternedString;
#[inline]
fn to_stable_hash_key(&self,
_: &StableHashingContext<'a>)
-> LocalInternedString {
self.clone()
}
}
impl<'a> HashStable<StableHashingContext<'a>> for ast::Name {
#[inline]
fn hash_stable<W: StableHasherResult>(&self,
@ -66,7 +86,7 @@ impl<'a> ToStableHashKey<StableHashingContext<'a>> for ast::Name {
fn to_stable_hash_key(&self,
_: &StableHashingContext<'a>)
-> InternedString {
self.as_str()
self.as_interned_str()
}
}
@ -179,8 +199,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for [ast::Attribute] {
let filtered: AccumulateVec<[&ast::Attribute; 8]> = self
.iter()
.filter(|attr| {
!attr.is_sugared_doc &&
attr.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true)
!attr.is_sugared_doc && !hcx.is_ignored_attr(attr.name())
})
.collect();
@ -191,12 +210,23 @@ impl<'a> HashStable<StableHashingContext<'a>> for [ast::Attribute] {
}
}
impl<'a> HashStable<StableHashingContext<'a>> for ast::Path {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
self.segments.len().hash_stable(hcx, hasher);
for segment in &self.segments {
segment.ident.name.hash_stable(hcx, hasher);
}
}
}
impl<'a> HashStable<StableHashingContext<'a>> for ast::Attribute {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
// Make sure that these have been filtered out.
debug_assert!(self.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true));
debug_assert!(!hcx.is_ignored_attr(self.name()));
debug_assert!(!self.is_sugared_doc);
let ast::Attribute {
@ -209,10 +239,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for ast::Attribute {
} = *self;
style.hash_stable(hcx, hasher);
path.segments.len().hash_stable(hcx, hasher);
for segment in &path.segments {
segment.ident.name.hash_stable(hcx, hasher);
}
path.hash_stable(hcx, hasher);
for tt in tokens.trees() {
tt.hash_stable(hcx, hasher);
}


@ -505,9 +505,6 @@ for ::middle::const_val::ErrKind<'gcx> {
len.hash_stable(hcx, hasher);
index.hash_stable(hcx, hasher);
}
Math(ref const_math_err) => {
const_math_err.hash_stable(hcx, hasher);
}
LayoutError(ref layout_error) => {
layout_error.hash_stable(hcx, hasher);
}
@ -528,16 +525,26 @@ impl_stable_hash_for!(struct ty::GenericPredicates<'tcx> {
predicates
});
impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
for ::mir::interpret::EvalError<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
self.kind.hash_stable(hcx, hasher)
}
}
impl<'a, 'gcx, O: HashStable<StableHashingContext<'a>>> HashStable<StableHashingContext<'a>>
for ::mir::interpret::EvalErrorKind<'gcx, O> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
use mir::interpret::EvalErrorKind::*;
mem::discriminant(&self.kind).hash_stable(hcx, hasher);
mem::discriminant(&self).hash_stable(hcx, hasher);
match self.kind {
match *self {
DanglingPointerDeref |
DoubleFree |
InvalidMemoryAccess |
@ -568,8 +575,12 @@ for ::mir::interpret::EvalError<'gcx> {
TypeckError |
DerefFunctionPointer |
ExecuteMemory |
ReferencedConstant |
OverflowingMath => {}
OverflowNeg |
RemainderByZero |
DivisionByZero |
GeneratorResumedAfterReturn |
GeneratorResumedAfterPanic |
ReferencedConstant => {}
MachineError(ref err) => err.hash_stable(hcx, hasher),
FunctionPointerTyMismatch(a, b) => {
a.hash_stable(hcx, hasher);
@ -588,14 +599,9 @@ for ::mir::interpret::EvalError<'gcx> {
},
InvalidBoolOp(bop) => bop.hash_stable(hcx, hasher),
Unimplemented(ref s) => s.hash_stable(hcx, hasher),
ArrayIndexOutOfBounds(sp, a, b) => {
sp.hash_stable(hcx, hasher);
a.hash_stable(hcx, hasher);
b.hash_stable(hcx, hasher)
},
Math(sp, ref err) => {
sp.hash_stable(hcx, hasher);
err.hash_stable(hcx, hasher)
BoundsCheck { ref len, ref index } => {
len.hash_stable(hcx, hasher);
index.hash_stable(hcx, hasher)
},
Intrinsic(ref s) => s.hash_stable(hcx, hasher),
InvalidChar(c) => c.hash_stable(hcx, hasher),
@ -668,6 +674,7 @@ for ::mir::interpret::EvalError<'gcx> {
Layout(lay) => lay.hash_stable(hcx, hasher),
HeapAllocNonPowerOfTwoAlignment(n) => n.hash_stable(hcx, hasher),
PathNotFound(ref v) => v.hash_stable(hcx, hasher),
Overflow(op) => op.hash_stable(hcx, hasher),
}
}
}
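The new `Overflow(op)`, `DivisionByZero`, and `RemainderByZero` variants hashed above correspond to ordinary arithmetic failure cases; a small illustration in plain Rust (not compiler code):

fn main() {
    let a: u8 = 200;
    assert_eq!(a.checked_add(100), None);   // the const evaluator would report Overflow(Add)
    assert_eq!(a.checked_div(0), None);     // DivisionByZero
    assert_eq!(a.checked_rem(0), None);     // RemainderByZero
}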


@ -18,7 +18,6 @@ mod fingerprint;
mod caching_codemap_view;
mod hcx;
mod impls_const_math;
mod impls_cstore;
mod impls_hir;
mod impls_mir;


@ -407,7 +407,7 @@ impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, '
drop(variables);
self.relate(&u, &u)
}
TypeVariableValue::Unknown { .. } => {
TypeVariableValue::Unknown { universe } => {
match self.ambient_variance {
// Invariant: no need to make a fresh type variable.
ty::Invariant => return Ok(t),
@ -424,7 +424,7 @@ impl<'cx, 'gcx, 'tcx> TypeRelation<'cx, 'gcx, 'tcx> for Generalizer<'cx, 'gcx, '
}
let origin = *variables.var_origin(vid);
let new_var_id = variables.new_var(false, origin);
let new_var_id = variables.new_var(universe, false, origin);
let u = self.tcx().mk_var(new_var_id);
debug!("generalize: replacing original vid={:?} with new={:?}",
vid, u);


@ -19,7 +19,7 @@ use super::{CombinedSnapshot,
use super::combine::CombineFields;
use super::region_constraints::{TaintDirections};
use std::collections::BTreeMap;
use rustc_data_structures::lazy_btree_map::LazyBTreeMap;
use ty::{self, TyCtxt, Binder, TypeFoldable};
use ty::error::TypeError;
use ty::relate::{Relate, RelateResult, TypeRelation};
@ -62,7 +62,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// Second, we instantiate each bound region in the supertype with a
// fresh concrete region.
let (b_prime, skol_map) =
self.infcx.skolemize_late_bound_regions(b, snapshot);
self.infcx.skolemize_late_bound_regions(b);
debug!("a_prime={:?}", a_prime);
debug!("b_prime={:?}", b_prime);
@ -114,7 +114,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// First, we instantiate each bound region in the matcher
// with a skolemized region.
let ((a_match, a_value), skol_map) =
self.infcx.skolemize_late_bound_regions(a_pair, snapshot);
self.infcx.skolemize_late_bound_regions(a_pair);
debug!("higher_ranked_match: a_match={:?}", a_match);
debug!("higher_ranked_match: skol_map={:?}", skol_map);
@ -247,7 +247,8 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot<'a, 'tcx>,
debruijn: ty::DebruijnIndex,
new_vars: &[ty::RegionVid],
a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
a_map: &LazyBTreeMap<ty::BoundRegion,
ty::Region<'tcx>>,
r0: ty::Region<'tcx>)
-> ty::Region<'tcx> {
// Regions that pre-dated the LUB computation stay as they are.
@ -343,7 +344,8 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot<'a, 'tcx>,
debruijn: ty::DebruijnIndex,
new_vars: &[ty::RegionVid],
a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
a_map: &LazyBTreeMap<ty::BoundRegion,
ty::Region<'tcx>>,
a_vars: &[ty::RegionVid],
b_vars: &[ty::RegionVid],
r0: ty::Region<'tcx>)
@ -412,7 +414,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
fn rev_lookup<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
span: Span,
a_map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
a_map: &LazyBTreeMap<ty::BoundRegion, ty::Region<'tcx>>,
r: ty::Region<'tcx>) -> ty::Region<'tcx>
{
for (a_br, a_r) in a_map {
@ -435,7 +437,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
}
fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>,
map: &BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
map: &LazyBTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
-> Vec<ty::RegionVid> {
map.iter()
.map(|(_, &r)| match *r {
@ -585,14 +587,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
///
/// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/trait-hrtb.html
pub fn skolemize_late_bound_regions<T>(&self,
binder: &ty::Binder<T>,
snapshot: &CombinedSnapshot<'a, 'tcx>)
binder: &ty::Binder<T>)
-> (T, SkolemizationMap<'tcx>)
where T : TypeFoldable<'tcx>
{
let (result, map) = self.tcx.replace_late_bound_regions(binder, |br| {
self.borrow_region_constraints()
.push_skolemized(self.tcx, br, &snapshot.region_constraints_snapshot)
self.universe.set(self.universe().subuniverse());
self.tcx.mk_region(ty::ReSkolemized(self.universe(), br))
});
debug!("skolemize_bound_regions(binder={:?}, result={:?}, map={:?})",
@ -777,7 +778,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
debug!("pop_skolemized({:?})", skol_map);
let skol_regions: FxHashSet<_> = skol_map.values().cloned().collect();
self.borrow_region_constraints()
.pop_skolemized(self.tcx, &skol_regions, &snapshot.region_constraints_snapshot);
.pop_skolemized(self.universe(), &skol_regions, &snapshot.region_constraints_snapshot);
self.universe.set(snapshot.universe);
if !skol_map.is_empty() {
self.projection_cache.borrow_mut().rollback_skolemized(
&snapshot.projection_cache_snapshot);


@ -15,7 +15,7 @@ use infer::RegionVariableOrigin;
use infer::region_constraints::Constraint;
use infer::region_constraints::GenericKind;
use infer::region_constraints::RegionConstraintData;
use infer::region_constraints::VarOrigins;
use infer::region_constraints::VarInfos;
use infer::region_constraints::VerifyBound;
use middle::free_region::RegionRelations;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
@ -37,7 +37,7 @@ mod graphviz;
/// all the variables as well as a set of errors that must be reported.
pub fn resolve<'tcx>(
region_rels: &RegionRelations<'_, '_, 'tcx>,
var_origins: VarOrigins,
var_infos: VarInfos,
data: RegionConstraintData<'tcx>,
) -> (
LexicalRegionResolutions<'tcx>,
@ -47,7 +47,7 @@ pub fn resolve<'tcx>(
let mut errors = vec![];
let mut resolver = LexicalResolver {
region_rels,
var_origins,
var_infos,
data,
};
let values = resolver.infer_variable_values(&mut errors);
@ -103,7 +103,7 @@ type RegionGraph<'tcx> = graph::Graph<(), Constraint<'tcx>>;
struct LexicalResolver<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
region_rels: &'cx RegionRelations<'cx, 'gcx, 'tcx>,
var_origins: VarOrigins,
var_infos: VarInfos,
data: RegionConstraintData<'tcx>,
}
@ -132,7 +132,7 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
}
fn num_vars(&self) -> usize {
self.var_origins.len()
self.var_infos.len()
}
/// Initially, the value for all variables is set to `'empty`, the
@ -279,7 +279,7 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
(&ReVar(v_id), _) | (_, &ReVar(v_id)) => {
span_bug!(
self.var_origins[v_id].span(),
self.var_infos[v_id].origin.span(),
"lub_concrete_regions invoked with non-concrete \
regions: {:?}, {:?}",
a,
@ -576,7 +576,7 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
if !self.region_rels
.is_subregion_of(lower_bound.region, upper_bound.region)
{
let origin = self.var_origins[node_idx].clone();
let origin = self.var_infos[node_idx].origin.clone();
debug!(
"region inference error at {:?} for {:?}: SubSupConflict sub: {:?} \
sup: {:?}",
@ -598,7 +598,7 @@ impl<'cx, 'gcx, 'tcx> LexicalResolver<'cx, 'gcx, 'tcx> {
}
span_bug!(
self.var_origins[node_idx].span(),
self.var_infos[node_idx].origin.span(),
"collect_error_for_expanding_node() could not find \
error for var {:?}, lower_bounds={:?}, \
upper_bounds={:?}",


@ -28,9 +28,9 @@ use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
use ty::fold::TypeFoldable;
use ty::relate::RelateResult;
use traits::{self, ObligationCause, PredicateObligations};
use rustc_data_structures::lazy_btree_map::LazyBTreeMap;
use rustc_data_structures::unify as ut;
use std::cell::{Cell, RefCell, Ref, RefMut};
use std::collections::BTreeMap;
use std::fmt;
use syntax::ast;
use errors::DiagnosticBuilder;
@ -42,7 +42,7 @@ use arena::SyncDroplessArena;
use self::combine::CombineFields;
use self::higher_ranked::HrMatchResult;
use self::region_constraints::{RegionConstraintCollector, RegionSnapshot};
use self::region_constraints::{GenericKind, VerifyBound, RegionConstraintData, VarOrigins};
use self::region_constraints::{GenericKind, VerifyBound, RegionConstraintData, VarInfos};
use self::lexical_region_resolve::LexicalRegionResolutions;
use self::outlives::env::OutlivesEnvironment;
use self::type_variable::TypeVariableOrigin;
@ -183,11 +183,22 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// obligations within. This is expected to be done 'late enough'
// that all type inference variables have been bound and so forth.
pub region_obligations: RefCell<Vec<(ast::NodeId, RegionObligation<'tcx>)>>,
/// What is the innermost universe we have created? Starts out as
/// `UniverseIndex::root()` but grows from there as we enter
/// universal quantifiers.
///
/// NB: At present, we exclude the universal quantifiers on the
/// item we are type-checking, and just consider those names as
/// part of the root universe. So this would only get incremented
/// when we enter into a higher-ranked (`for<..>`) type or trait
/// bound.
universe: Cell<ty::UniverseIndex>,
}
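The universal quantifiers the comment above refers to are higher-ranked (`for<..>`) bounds. A sketch in plain user code of the kind of bound that makes the inference context enter a fresh universe (nothing here is rustc-internal):

fn call_with_any_lifetime<F>(f: F) -> usize
where
    F: for<'a> Fn(&'a str) -> usize,   // higher-ranked bound: quantifies over `'a`
{
    f("hello")
}

fn main() {
    assert_eq!(call_with_any_lifetime(|s: &str| s.len()), 5);
}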
/// A map returned by `skolemize_late_bound_regions()` indicating the skolemized
/// region that each late-bound region was replaced with.
pub type SkolemizationMap<'tcx> = BTreeMap<ty::BoundRegion, ty::Region<'tcx>>;
pub type SkolemizationMap<'tcx> = LazyBTreeMap<ty::BoundRegion, ty::Region<'tcx>>;
/// See `error_reporting` module for more details
#[derive(Clone, Debug)]
@ -455,6 +466,7 @@ impl<'a, 'gcx, 'tcx> InferCtxtBuilder<'a, 'gcx, 'tcx> {
err_count_on_creation: tcx.sess.err_count(),
in_snapshot: Cell::new(false),
region_obligations: RefCell::new(vec![]),
universe: Cell::new(ty::UniverseIndex::ROOT),
}))
}
}
@ -489,6 +501,7 @@ pub struct CombinedSnapshot<'a, 'tcx:'a> {
float_snapshot: ut::Snapshot<ut::InPlace<ty::FloatVid>>,
region_constraints_snapshot: RegionSnapshot,
region_obligations_snapshot: usize,
universe: ty::UniverseIndex,
was_in_snapshot: bool,
_in_progress_tables: Option<Ref<'a, ty::TypeckTables<'tcx>>>,
}
@ -618,6 +631,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
float_snapshot: self.float_unification_table.borrow_mut().snapshot(),
region_constraints_snapshot: self.borrow_region_constraints().start_snapshot(),
region_obligations_snapshot: self.region_obligations.borrow().len(),
universe: self.universe(),
was_in_snapshot: in_snapshot,
// Borrow tables "in progress" (i.e. during typeck)
// to ban writes from within a snapshot to them.
@ -635,10 +649,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
float_snapshot,
region_constraints_snapshot,
region_obligations_snapshot,
universe,
was_in_snapshot,
_in_progress_tables } = snapshot;
self.in_snapshot.set(was_in_snapshot);
self.universe.set(universe);
self.projection_cache
.borrow_mut()
@ -667,6 +683,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
float_snapshot,
region_constraints_snapshot,
region_obligations_snapshot: _,
universe: _,
was_in_snapshot,
_in_progress_tables } = snapshot;
@ -811,7 +828,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
Some(self.commit_if_ok(|snapshot| {
let (ty::SubtypePredicate { a_is_expected, a, b}, skol_map) =
self.skolemize_late_bound_regions(predicate, snapshot);
self.skolemize_late_bound_regions(predicate);
let cause_span = cause.span;
let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?;
@ -828,7 +845,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
{
self.commit_if_ok(|snapshot| {
let (ty::OutlivesPredicate(r_a, r_b), skol_map) =
self.skolemize_late_bound_regions(predicate, snapshot);
self.skolemize_late_bound_regions(predicate);
let origin =
SubregionOrigin::from_obligation_cause(cause,
|| RelateRegionParamBound(cause.span));
@ -841,7 +858,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
pub fn next_ty_var_id(&self, diverging: bool, origin: TypeVariableOrigin) -> TyVid {
self.type_variables
.borrow_mut()
.new_var(diverging, origin)
.new_var(self.universe(), diverging, origin)
}
pub fn next_ty_var(&self, origin: TypeVariableOrigin) -> Ty<'tcx> {
@ -872,12 +889,14 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
/// during diagnostics / error-reporting.
pub fn next_region_var(&self, origin: RegionVariableOrigin)
-> ty::Region<'tcx> {
self.tcx.mk_region(ty::ReVar(self.borrow_region_constraints().new_region_var(origin)))
let region_var = self.borrow_region_constraints()
.new_region_var(self.universe(), origin);
self.tcx.mk_region(ty::ReVar(region_var))
}
/// Number of region variables created so far.
pub fn num_region_vars(&self) -> usize {
self.borrow_region_constraints().var_origins().len()
self.borrow_region_constraints().num_region_vars()
}
/// Just a convenient wrapper of `next_region_var` for using during NLL.
@ -909,7 +928,8 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
-> Ty<'tcx> {
let ty_var_id = self.type_variables
.borrow_mut()
.new_var(false,
.new_var(self.universe(),
false,
TypeVariableOrigin::TypeParameterDefinition(span, def.name));
self.tcx.mk_var(ty_var_id)
@ -1004,12 +1024,12 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
region_context,
region_map,
outlives_env.free_region_map());
let (var_origins, data) = self.region_constraints.borrow_mut()
let (var_infos, data) = self.region_constraints.borrow_mut()
.take()
.expect("regions already resolved")
.into_origins_and_data();
.into_infos_and_data();
let (lexical_region_resolutions, errors) =
lexical_region_resolve::resolve(region_rels, var_origins, data);
lexical_region_resolve::resolve(region_rels, var_infos, data);
let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions));
assert!(old_value.is_none());
@ -1057,13 +1077,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
/// hence that `resolve_regions_and_report_errors` can never be
/// called. This is used only during NLL processing to "hand off" ownership
/// of the set of region variables into the NLL region context.
pub fn take_region_var_origins(&self) -> VarOrigins {
let (var_origins, data) = self.region_constraints.borrow_mut()
pub fn take_region_var_origins(&self) -> VarInfos {
let (var_infos, data) = self.region_constraints.borrow_mut()
.take()
.expect("regions already resolved")
.into_origins_and_data();
.into_infos_and_data();
assert!(data.is_empty());
var_origins
var_infos
}
pub fn ty_to_string(&self, t: Ty<'tcx>) -> String {
@ -1216,7 +1236,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
span: Span,
lbrct: LateBoundRegionConversionTime,
value: &ty::Binder<T>)
-> (T, BTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
-> (T, LazyBTreeMap<ty::BoundRegion, ty::Region<'tcx>>)
where T : TypeFoldable<'tcx>
{
self.tcx.replace_late_bound_regions(
@ -1356,6 +1376,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
self.evaluation_cache.clear();
self.projection_cache.borrow_mut().clear();
}
fn universe(&self) -> ty::UniverseIndex {
self.universe.get()
}
}
impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> {


@ -22,18 +22,16 @@ use rustc_data_structures::unify as ut;
use ty::{self, Ty, TyCtxt};
use ty::{Region, RegionVid};
use ty::ReStatic;
use ty::{BrFresh, ReLateBound, ReSkolemized, ReVar};
use ty::{BrFresh, ReLateBound, ReVar};
use std::collections::BTreeMap;
use std::fmt;
use std::mem;
use std::u32;
use std::{cmp, fmt, mem, u32};
mod taint;
pub struct RegionConstraintCollector<'tcx> {
/// For each `RegionVid`, the corresponding `RegionVariableOrigin`.
var_origins: IndexVec<RegionVid, RegionVariableOrigin>,
var_infos: IndexVec<RegionVid, RegionVariableInfo>,
data: RegionConstraintData<'tcx>,
@ -47,9 +45,6 @@ pub struct RegionConstraintCollector<'tcx> {
/// exist). This prevents us from making many such regions.
glbs: CombineMap<'tcx>,
/// Number of skolemized variables currently active.
skolemization_count: u32,
/// Global counter used during the GLB algorithm to create unique
/// names for fresh bound regions
bound_count: u32,
@ -76,7 +71,7 @@ pub struct RegionConstraintCollector<'tcx> {
unification_table: ut::UnificationTable<ut::InPlace<ty::RegionVid>>,
}
pub type VarOrigins = IndexVec<RegionVid, RegionVariableOrigin>;
pub type VarInfos = IndexVec<RegionVid, RegionVariableInfo>;
/// The full set of region constraints gathered up by the collector.
/// Describes constraints between the region variables and other
@ -230,10 +225,15 @@ enum CombineMapType {
type CombineMap<'tcx> = FxHashMap<TwoRegions<'tcx>, RegionVid>;
#[derive(Debug, Clone, Copy)]
pub struct RegionVariableInfo {
pub origin: RegionVariableOrigin,
pub universe: ty::UniverseIndex,
}
pub struct RegionSnapshot {
length: usize,
region_snapshot: ut::Snapshot<ut::InPlace<ty::RegionVid>>,
skolemization_count: u32,
}
/// When working with skolemized regions, we often wish to find all of
@ -273,19 +273,18 @@ impl TaintDirections {
impl<'tcx> RegionConstraintCollector<'tcx> {
pub fn new() -> RegionConstraintCollector<'tcx> {
RegionConstraintCollector {
var_origins: VarOrigins::default(),
var_infos: VarInfos::default(),
data: RegionConstraintData::default(),
lubs: FxHashMap(),
glbs: FxHashMap(),
skolemization_count: 0,
bound_count: 0,
undo_log: Vec::new(),
unification_table: ut::UnificationTable::new(),
}
}
pub fn var_origins(&self) -> &VarOrigins {
&self.var_origins
pub fn num_region_vars(&self) -> usize {
self.var_infos.len()
}
pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> {
@ -295,9 +294,9 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
/// Once all the constraints have been gathered, extract out the final data.
///
/// Not legal during a snapshot.
pub fn into_origins_and_data(self) -> (VarOrigins, RegionConstraintData<'tcx>) {
pub fn into_infos_and_data(self) -> (VarInfos, RegionConstraintData<'tcx>) {
assert!(!self.in_snapshot());
(self.var_origins, self.data)
(self.var_infos, self.data)
}
/// Takes (and clears) the current set of constraints. Note that
@ -319,18 +318,15 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
// should think carefully about whether it needs to be cleared
// or updated in some way.
let RegionConstraintCollector {
var_origins,
var_infos,
data,
lubs,
glbs,
skolemization_count,
bound_count: _,
undo_log: _,
unification_table,
} = self;
assert_eq!(*skolemization_count, 0);
// Clear the tables of (lubs, glbs), so that we will create
// fresh regions if we do a LUB operation. As it happens,
// LUB/GLB are not performed by the MIR type-checker, which is
@ -343,7 +339,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
// also insert `a <= b` and a `b <= a` edges, so the
// `RegionConstraintData` contains the relationship here.
*unification_table = ut::UnificationTable::new();
for vid in var_origins.indices() {
for vid in var_infos.indices() {
unification_table.new_key(unify_key::RegionVidKey { min_vid: vid });
}
@ -365,7 +361,6 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
RegionSnapshot {
length,
region_snapshot: self.unification_table.snapshot(),
skolemization_count: self.skolemization_count,
}
}
@ -373,12 +368,6 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
debug!("RegionConstraintCollector: commit({})", snapshot.length);
assert!(self.undo_log.len() > snapshot.length);
assert!(self.undo_log[snapshot.length] == OpenSnapshot);
assert!(
self.skolemization_count == snapshot.skolemization_count,
"failed to pop skolemized regions: {} now vs {} at start",
self.skolemization_count,
snapshot.skolemization_count
);
if snapshot.length == 0 {
self.undo_log.truncate(0);
@ -398,7 +387,6 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
}
let c = self.undo_log.pop().unwrap();
assert!(c == OpenSnapshot);
self.skolemization_count = snapshot.skolemization_count;
self.unification_table.rollback_to(snapshot.region_snapshot);
}
@ -411,8 +399,8 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
// nothing to do here
}
AddVar(vid) => {
self.var_origins.pop().unwrap();
assert_eq!(self.var_origins.len(), vid.index() as usize);
self.var_infos.pop().unwrap();
assert_eq!(self.var_infos.len(), vid.index() as usize);
}
AddConstraint(ref constraint) => {
self.data.constraints.remove(constraint);
@ -433,8 +421,13 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
}
}
pub fn new_region_var(&mut self, origin: RegionVariableOrigin) -> RegionVid {
let vid = self.var_origins.push(origin.clone());
pub fn new_region_var(&mut self,
universe: ty::UniverseIndex,
origin: RegionVariableOrigin) -> RegionVid {
let vid = self.var_infos.push(RegionVariableInfo {
origin,
universe,
});
let u_vid = self.unification_table
.new_key(unify_key::RegionVidKey { min_vid: vid });
@ -450,44 +443,14 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
return vid;
}
/// Returns the origin for the given variable.
pub fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin {
self.var_origins[vid].clone()
/// Returns the universe for the given variable.
pub fn var_universe(&self, vid: RegionVid) -> ty::UniverseIndex {
self.var_infos[vid].universe
}
/// Creates a new skolemized region. Skolemized regions are fresh
/// regions used when performing higher-ranked computations. They
/// must be used in a very particular way and are never supposed
/// to "escape" out into error messages or the code at large.
///
/// The idea is to always create a snapshot. Skolemized regions
/// can be created in the context of this snapshot, but before the
/// snapshot is committed or rolled back, they must be popped
/// (using `pop_skolemized_regions`), so that their numbers can be
/// recycled. Normally you don't have to think about this: you use
/// the APIs in `higher_ranked/mod.rs`, such as
/// `skolemize_late_bound_regions` and `plug_leaks`, which will
/// guide you on this path (ensure that the `SkolemizationMap` is
/// consumed and you are good). For more info on how skolemization
/// for HRTBs works, see the [rustc guide].
///
/// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/trait-hrtb.html
///
/// The `snapshot` argument to this function is not really used;
/// it's just there to make it explicit which snapshot bounds the
/// skolemized region that results. It should always be the top-most snapshot.
pub fn push_skolemized(
&mut self,
tcx: TyCtxt<'_, '_, 'tcx>,
br: ty::BoundRegion,
snapshot: &RegionSnapshot,
) -> Region<'tcx> {
assert!(self.in_snapshot());
assert!(self.undo_log[snapshot.length] == OpenSnapshot);
let sc = self.skolemization_count;
self.skolemization_count = sc + 1;
tcx.mk_region(ReSkolemized(ty::SkolemizedRegionVid { index: sc }, br))
/// Returns the origin for the given variable.
pub fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin {
self.var_infos[vid].origin
}
/// Removes all the edges to/from the skolemized regions that are
@ -496,7 +459,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
/// created in that time.
pub fn pop_skolemized(
&mut self,
_tcx: TyCtxt<'_, '_, 'tcx>,
skolemization_count: ty::UniverseIndex,
skols: &FxHashSet<ty::Region<'tcx>>,
snapshot: &RegionSnapshot,
) {
@ -505,36 +468,28 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
assert!(self.in_snapshot());
assert!(self.undo_log[snapshot.length] == OpenSnapshot);
assert!(
self.skolemization_count as usize >= skols.len(),
skolemization_count.as_usize() >= skols.len(),
"popping more skolemized variables than actually exist, \
sc now = {}, skols.len = {}",
self.skolemization_count,
sc now = {:?}, skols.len = {:?}",
skolemization_count,
skols.len()
);
let last_to_pop = self.skolemization_count;
let first_to_pop = last_to_pop - (skols.len() as u32);
let last_to_pop = skolemization_count.subuniverse();
let first_to_pop = ty::UniverseIndex::from(last_to_pop.as_u32() - skols.len() as u32);
assert!(
first_to_pop >= snapshot.skolemization_count,
"popping more regions than snapshot contains, \
sc now = {}, sc then = {}, skols.len = {}",
self.skolemization_count,
snapshot.skolemization_count,
skols.len()
);
debug_assert! {
skols.iter()
.all(|&k| match *k {
ty::ReSkolemized(index, _) =>
index.index >= first_to_pop &&
index.index < last_to_pop,
ty::ReSkolemized(universe, _) =>
universe >= first_to_pop &&
universe < last_to_pop,
_ =>
false
}),
"invalid skolemization keys or keys out of range ({}..{}): {:?}",
snapshot.skolemization_count,
self.skolemization_count,
"invalid skolemization keys or keys out of range ({:?}..{:?}): {:?}",
first_to_pop,
last_to_pop,
skols
}
@ -551,7 +506,6 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
self.rollback_undo_entry(undo_entry);
}
self.skolemization_count = snapshot.skolemization_count;
return;
fn kill_constraint<'tcx>(
@ -805,7 +759,10 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
if let Some(&c) = self.combine_map(t).get(&vars) {
return tcx.mk_region(ReVar(c));
}
let c = self.new_region_var(MiscVariable(origin.span()));
let a_universe = self.universe(a);
let b_universe = self.universe(b);
let c_universe = cmp::max(a_universe, b_universe);
let c = self.new_region_var(c_universe, MiscVariable(origin.span()));
self.combine_map(t).insert(vars, c);
if self.in_snapshot() {
self.undo_log.push(AddCombination(t, vars));
@ -821,6 +778,24 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
new_r
}
fn universe(&self, region: Region<'tcx>) -> ty::UniverseIndex {
match *region {
ty::ReScope(..) |
ty::ReStatic |
ty::ReEmpty |
ty::ReErased |
ty::ReFree(..) |
ty::ReEarlyBound(..) => ty::UniverseIndex::ROOT,
ty::ReSkolemized(universe, _) => universe,
ty::ReClosureBound(vid) |
ty::ReVar(vid) => self.var_universe(vid),
ty::ReLateBound(..) =>
bug!("universe(): encountered bound region {:?}", region),
ty::ReCanonical(..) =>
bug!("region_universe(): encountered canonical region {:?}", region),
}
}
pub fn vars_created_since_snapshot(&self, mark: &RegionSnapshot) -> Vec<RegionVid> {
self.undo_log[mark.length..]
.iter()
@ -865,12 +840,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
impl fmt::Debug for RegionSnapshot {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(
f,
"RegionSnapshot(length={},skolemization={})",
self.length,
self.skolemization_count
)
write!(f, "RegionSnapshot(length={})", self.length)
}
}


@ -78,10 +78,12 @@ struct TypeVariableData {
#[derive(Copy, Clone, Debug)]
pub enum TypeVariableValue<'tcx> {
Known { value: Ty<'tcx> },
Unknown,
Unknown { universe: ty::UniverseIndex },
}
impl<'tcx> TypeVariableValue<'tcx> {
/// If this value is known, returns the type it is known to be.
/// Otherwise, `None`.
pub fn known(&self) -> Option<Ty<'tcx>> {
match *self {
TypeVariableValue::Unknown { .. } => None,
@ -181,10 +183,11 @@ impl<'tcx> TypeVariableTable<'tcx> {
/// The code in this module doesn't care, but it can be useful
/// for improving error messages.
pub fn new_var(&mut self,
universe: ty::UniverseIndex,
diverging: bool,
origin: TypeVariableOrigin)
-> ty::TyVid {
let eq_key = self.eq_relations.new_key(TypeVariableValue::Unknown);
let eq_key = self.eq_relations.new_key(TypeVariableValue::Unknown { universe });
let sub_key = self.sub_relations.new_key(());
assert_eq!(eq_key.vid, sub_key);
@ -437,7 +440,16 @@ impl<'tcx> ut::UnifyValue for TypeVariableValue<'tcx> {
(&TypeVariableValue::Unknown { .. }, &TypeVariableValue::Known { .. }) => Ok(*value2),
// If both sides are *unknown*, it hardly matters, does it?
(&TypeVariableValue::Unknown, &TypeVariableValue::Unknown) => Ok(*value1),
(&TypeVariableValue::Unknown { universe: universe1 },
&TypeVariableValue::Unknown { universe: universe2 }) => {
// If we unify two unbound variables, ?T and ?U, then whatever
// value they wind up taking (which must be the same value) must
// be nameable by both universes. Therefore, the resulting
// universe is the minimum of the two universes, because that is
// the one which contains the fewest names in scope.
let universe = cmp::min(universe1, universe2);
Ok(TypeVariableValue::Unknown { universe })
}
}
}
}
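A toy model of the unification rule described in the comment above (the `UniverseIndex` here is only a stand-in ordered like rustc's, with smaller meaning outermost):

use std::cmp;

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct UniverseIndex(u32);

// Unifying two unbound variables keeps the smaller (outermost) universe,
// because the eventual value must be nameable from both scopes.
fn unify_unknowns(a: UniverseIndex, b: UniverseIndex) -> UniverseIndex {
    cmp::min(a, b)
}

fn main() {
    assert_eq!(unify_unknowns(UniverseIndex(0), UniverseIndex(3)), UniverseIndex(0));
}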


@ -45,8 +45,8 @@
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(drain_filter)]
#![feature(dyn_trait)]
#![feature(entry_or_default)]
#![cfg_attr(stage0, feature(dyn_trait))]
#![feature(from_ref)]
#![feature(fs_read_write)]
#![cfg_attr(windows, feature(libc))]
@ -58,6 +58,7 @@
#![feature(nonzero)]
#![feature(proc_macro_internals)]
#![feature(quote)]
#![feature(optin_builtin_traits)]
#![feature(refcell_replace_swap)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_patterns)]
@ -68,7 +69,7 @@
#![feature(trusted_len)]
#![feature(catch_expr)]
#![feature(test)]
#![feature(inclusive_range_fields)]
#![feature(inclusive_range_methods)]
#![recursion_limit="512"]
@ -84,7 +85,6 @@ extern crate libc;
extern crate rustc_target;
#[macro_use] extern crate rustc_data_structures;
extern crate serialize;
extern crate rustc_const_math;
extern crate rustc_errors as errors;
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;


@ -198,7 +198,7 @@ impl<'a> LintLevelsBuilder<'a> {
"malformed lint attribute");
};
for attr in attrs {
let level = match attr.name().and_then(|name| Level::from_str(&name.as_str())) {
let level = match Level::from_str(&attr.name().as_str()) {
None => continue,
Some(lvl) => lvl,
};
@ -221,7 +221,7 @@ impl<'a> LintLevelsBuilder<'a> {
continue
}
};
let name = word.ident.name;
let name = word.name();
match store.check_lint_name(&name.as_str()) {
CheckLintNameResult::Ok(ids) => {
let src = LintSource::Node(name, li.span);
@ -260,7 +260,7 @@ impl<'a> LintLevelsBuilder<'a> {
Some(li.span.into()),
&msg);
if name.as_str().chars().any(|c| c.is_uppercase()) {
let name_lower = name.as_str().to_lowercase();
let name_lower = name.as_str().to_lowercase().to_string();
if let CheckLintNameResult::NoLint =
store.check_lint_name(&name_lower) {
db.emit();
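For context, the attributes this level-parsing loop handles are the familiar lint-level attributes; a minimal user-side example (the miscased name is shown commented out, since it would only exercise the unknown-lint path above):

#![allow(dead_code)]            // crate-wide level

#[warn(unused_variables)]       // item-level override
fn noisy() {
    let _quiet = 1;             // leading underscore keeps the lint silent
}

// #[allow(Dead_Code)]          // unknown lint; the lowercase hint above would suggest `dead_code`
fn main() { noisy(); }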


@ -507,7 +507,7 @@ pub fn struct_lint_level<'a>(sess: &'a Session,
let explanation = if lint_id == LintId::of(::lint::builtin::UNSTABLE_NAME_COLLISION) {
"once this method is added to the standard library, \
there will be ambiguity here, which will cause a hard error!"
the ambiguity may cause an error or change in behavior!"
.to_owned()
} else if let Some(edition) = future_incompatible.edition {
format!("{} in the {} edition!", STANDARD_MESSAGE, edition)


@ -11,7 +11,6 @@
use hir::def_id::DefId;
use ty::{self, TyCtxt, layout};
use ty::subst::Substs;
use rustc_const_math::*;
use mir::interpret::{Value, PrimVal};
use errors::DiagnosticBuilder;
@ -62,7 +61,6 @@ pub enum ErrKind<'tcx> {
UnimplementedConstVal(&'static str),
IndexOutOfBounds { len: u64, index: u64 },
Math(ConstMathErr),
LayoutError(layout::LayoutError<'tcx>),
TypeckError,
@ -76,15 +74,6 @@ pub struct FrameInfo {
pub location: String,
}
impl<'tcx> From<ConstMathErr> for ErrKind<'tcx> {
fn from(err: ConstMathErr) -> ErrKind<'tcx> {
match err {
ConstMathErr::UnsignedNegation => ErrKind::TypeckError,
_ => ErrKind::Math(err)
}
}
}
#[derive(Clone, Debug)]
pub enum ConstEvalErrDescription<'a, 'tcx: 'a> {
Simple(Cow<'a, str>),
@ -122,7 +111,6 @@ impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
len, index)
}
Math(ref err) => Simple(err.description().into_cow()),
LayoutError(ref err) => Simple(err.to_string().into_cow()),
TypeckError => simple!("type-checking failed"),


@ -28,6 +28,7 @@ use ty::{self, TyCtxt, adjustment};
use hir::{self, PatKind};
use rustc_data_structures::sync::Lrc;
use std::rc::Rc;
use syntax::ast;
use syntax::ptr::P;
use syntax_pos::Span;
@ -44,7 +45,7 @@ pub trait Delegate<'tcx> {
fn consume(&mut self,
consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt<'tcx>,
cmt: &mc::cmt_<'tcx>,
mode: ConsumeMode);
// The value found at `cmt` has been determined to match the
@ -61,14 +62,14 @@ pub trait Delegate<'tcx> {
// called on a subpart of an input passed to `matched_pat`).
fn matched_pat(&mut self,
matched_pat: &hir::Pat,
cmt: mc::cmt<'tcx>,
cmt: &mc::cmt_<'tcx>,
mode: MatchMode);
// The value found at `cmt` is either copied or moved via the
// pattern binding `consume_pat`, depending on mode.
fn consume_pat(&mut self,
consume_pat: &hir::Pat,
cmt: mc::cmt<'tcx>,
cmt: &mc::cmt_<'tcx>,
mode: ConsumeMode);
// The value found at `borrow` is being borrowed at the point
@ -76,7 +77,7 @@ pub trait Delegate<'tcx> {
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
cmt: mc::cmt<'tcx>,
cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
bk: ty::BorrowKind,
loan_cause: LoanCause);
@ -90,7 +91,7 @@ pub trait Delegate<'tcx> {
fn mutate(&mut self,
assignment_id: ast::NodeId,
assignment_span: Span,
assignee_cmt: mc::cmt<'tcx>,
assignee_cmt: &mc::cmt_<'tcx>,
mode: MutateMode);
}
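A rough sketch (with made-up names, not rustc's types) of the shape of this refactor: the delegate callbacks now borrow the categorized value instead of taking an owned `Rc`, so the caller clones only where shared ownership is genuinely needed:

use std::rc::Rc;

struct CmtData { note: &'static str }   // hypothetical stand-in for `mc::cmt_`

fn old_style(cmt: Rc<CmtData>) { println!("consume {}", cmt.note); }   // forced an Rc per call
fn new_style(cmt: &CmtData)    { println!("consume {}", cmt.note); }   // a borrow suffices

fn main() {
    let cmt = Rc::new(CmtData { note: "categorized place" });
    old_style(Rc::clone(&cmt));
    new_style(&cmt);   // deref-coerces `&Rc<CmtData>` to `&CmtData`
}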
@ -316,11 +317,11 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
let fn_body_scope_r =
self.tcx().mk_region(ty::ReScope(region::Scope::Node(body.value.hir_id.local_id)));
let arg_cmt = self.mc.cat_rvalue(
let arg_cmt = Rc::new(self.mc.cat_rvalue(
arg.id,
arg.pat.span,
fn_body_scope_r, // Args live only as long as the fn body.
arg_ty);
arg_ty));
self.walk_irrefutable_pat(arg_cmt, &arg.pat);
}
@ -335,11 +336,11 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
fn delegate_consume(&mut self,
consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt<'tcx>) {
cmt: &mc::cmt_<'tcx>) {
debug!("delegate_consume(consume_id={}, cmt={:?})",
consume_id, cmt);
let mode = copy_or_move(&self.mc, self.param_env, &cmt, DirectRefMove);
let mode = copy_or_move(&self.mc, self.param_env, cmt, DirectRefMove);
self.delegate.consume(consume_id, consume_span, cmt, mode);
}
@ -353,7 +354,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
debug!("consume_expr(expr={:?})", expr);
let cmt = return_if_err!(self.mc.cat_expr(expr));
self.delegate_consume(expr.id, expr.span, cmt);
self.delegate_consume(expr.id, expr.span, &cmt);
self.walk_expr(expr);
}
@ -362,7 +363,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
expr: &hir::Expr,
mode: MutateMode) {
let cmt = return_if_err!(self.mc.cat_expr(expr));
self.delegate.mutate(assignment_expr.id, assignment_expr.span, cmt, mode);
self.delegate.mutate(assignment_expr.id, assignment_expr.span, &cmt, mode);
self.walk_expr(expr);
}
@ -375,7 +376,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
expr, r, bk);
let cmt = return_if_err!(self.mc.cat_expr(expr));
self.delegate.borrow(expr.id, expr.span, cmt, r, bk, cause);
self.delegate.borrow(expr.id, expr.span, &cmt, r, bk, cause);
self.walk_expr(expr)
}
@ -435,7 +436,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
}
hir::ExprMatch(ref discr, ref arms, _) => {
let discr_cmt = return_if_err!(self.mc.cat_expr(&discr));
let discr_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&discr)));
let r = self.tcx().types.re_empty;
self.borrow_expr(&discr, r, ty::ImmBorrow, MatchDiscriminant);
@ -619,7 +620,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
// "assigns", which is handled by
// `walk_pat`:
self.walk_expr(&expr);
let init_cmt = return_if_err!(self.mc.cat_expr(&expr));
let init_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&expr)));
self.walk_irrefutable_pat(init_cmt, &local.pat);
}
}
@ -652,7 +653,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
None => { return; }
};
let with_cmt = return_if_err!(self.mc.cat_expr(&with_expr));
let with_cmt = Rc::new(return_if_err!(self.mc.cat_expr(&with_expr)));
// Select just those fields of the `with`
// expression that will actually be used
@ -671,7 +672,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
with_field.name,
with_field.ty(self.tcx(), substs)
);
self.delegate_consume(with_expr.id, with_expr.span, cmt_field);
self.delegate_consume(with_expr.id, with_expr.span, &cmt_field);
}
}
}
@ -710,7 +711,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
adjustment::Adjust::Unsize => {
// Creating a closure/fn-pointer or unsizing consumes
// the input and stores it into the resulting rvalue.
self.delegate_consume(expr.id, expr.span, cmt.clone());
self.delegate_consume(expr.id, expr.span, &cmt);
}
adjustment::Adjust::Deref(None) => {}
@ -722,12 +723,11 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
// this is an autoref of `x`.
adjustment::Adjust::Deref(Some(ref deref)) => {
let bk = ty::BorrowKind::from_mutbl(deref.mutbl);
self.delegate.borrow(expr.id, expr.span, cmt.clone(),
deref.region, bk, AutoRef);
self.delegate.borrow(expr.id, expr.span, &cmt, deref.region, bk, AutoRef);
}
adjustment::Adjust::Borrow(ref autoref) => {
self.walk_autoref(expr, cmt.clone(), autoref);
self.walk_autoref(expr, &cmt, autoref);
}
}
cmt = return_if_err!(self.mc.cat_expr_adjusted(expr, cmt, &adjustment));
@ -739,7 +739,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
/// after all relevant autoderefs have occurred.
fn walk_autoref(&mut self,
expr: &hir::Expr,
cmt_base: mc::cmt<'tcx>,
cmt_base: &mc::cmt_<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>) {
debug!("walk_autoref(expr.id={} cmt_base={:?} autoref={:?})",
expr.id,
@ -852,7 +852,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
// Each match binding is effectively an assignment to the
// binding being produced.
let def = Def::Local(canonical_id);
if let Ok(binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
if let Ok(ref binding_cmt) = mc.cat_def(pat.id, pat.span, pat_ty, def) {
delegate.mutate(pat.id, pat.span, binding_cmt, MutateMode::Init);
}
@ -861,13 +861,13 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
ty::BindByReference(m) => {
if let ty::TyRef(r, _) = pat_ty.sty {
let bk = ty::BorrowKind::from_mutbl(m);
delegate.borrow(pat.id, pat.span, cmt_pat, r, bk, RefBinding);
delegate.borrow(pat.id, pat.span, &cmt_pat, r, bk, RefBinding);
}
}
ty::BindByValue(..) => {
let mode = copy_or_move(mc, param_env, &cmt_pat, PatBindingMove);
debug!("walk_pat binding consuming pat");
delegate.consume_pat(pat, cmt_pat, mode);
delegate.consume_pat(pat, &cmt_pat, mode);
}
}
}
@ -891,12 +891,12 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
let downcast_cmt = mc.cat_downcast_if_needed(pat, cmt_pat, variant_did);
debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat);
delegate.matched_pat(pat, downcast_cmt, match_mode);
delegate.matched_pat(pat, &downcast_cmt, match_mode);
}
Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) |
Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => {
debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat);
delegate.matched_pat(pat, cmt_pat, match_mode);
delegate.matched_pat(pat, &cmt_pat, match_mode);
}
_ => {}
}
@ -924,12 +924,12 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
self.param_env,
&cmt_var,
CaptureMove);
self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode);
self.delegate.consume(closure_expr.id, freevar.span, &cmt_var, mode);
}
ty::UpvarCapture::ByRef(upvar_borrow) => {
self.delegate.borrow(closure_expr.id,
fn_decl_span,
cmt_var,
&cmt_var,
upvar_borrow.region,
upvar_borrow.kind,
ClosureCapture(freevar.span));
@ -943,7 +943,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
closure_id: ast::NodeId,
closure_span: Span,
upvar: &hir::Freevar)
-> mc::McResult<mc::cmt<'tcx>> {
-> mc::McResult<mc::cmt_<'tcx>> {
// Create the cmt for the variable being borrowed, from the
// caller's perspective
let var_hir_id = self.tcx().hir.node_to_hir_id(upvar.var_id());
@ -954,7 +954,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
fn copy_or_move<'a, 'gcx, 'tcx>(mc: &mc::MemCategorizationContext<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
cmt: &mc::cmt<'tcx>,
cmt: &mc::cmt_<'tcx>,
move_reason: MoveReason)
-> ConsumeMode
{


@ -111,6 +111,7 @@ use ty::{self, TyCtxt};
use lint;
use util::nodemap::{NodeMap, NodeSet};
use std::collections::VecDeque;
use std::{fmt, usize};
use std::io::prelude::*;
use std::io;
@ -412,18 +413,43 @@ fn visit_local<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, local: &'tcx hir::Local) {
}
fn visit_arm<'a, 'tcx>(ir: &mut IrMaps<'a, 'tcx>, arm: &'tcx hir::Arm) {
for pat in &arm.pats {
// for struct patterns, take note of which fields used shorthand (`x` rather than `x: x`)
for mut pat in &arm.pats {
// For struct patterns, take note of which fields used shorthand
// (`x` rather than `x: x`).
//
// FIXME: according to the rust-lang-nursery/rustc-guide book, `NodeId`s are to be phased
// out in favor of `HirId`s; however, we need to match the signature of `each_binding`,
// which uses `NodeIds`.
// FIXME: according to the rust-lang-nursery/rustc-guide book, `NodeId`s are to be
// phased out in favor of `HirId`s; however, we need to match the signature of
// `each_binding`, which uses `NodeIds`.
let mut shorthand_field_ids = NodeSet();
if let hir::PatKind::Struct(_, ref fields, _) = pat.node {
for field in fields {
if field.node.is_shorthand {
shorthand_field_ids.insert(field.node.pat.id);
let mut pats = VecDeque::new();
pats.push_back(pat);
while let Some(pat) = pats.pop_front() {
use hir::PatKind::*;
match pat.node {
Binding(_, _, _, ref inner_pat) => {
pats.extend(inner_pat.iter());
}
Struct(_, ref fields, _) => {
for field in fields {
if field.node.is_shorthand {
shorthand_field_ids.insert(field.node.pat.id);
}
}
}
Ref(ref inner_pat, _) |
Box(ref inner_pat) => {
pats.push_back(inner_pat);
}
TupleStruct(_, ref inner_pats, _) |
Tuple(ref inner_pats, _) => {
pats.extend(inner_pats.iter());
}
Slice(ref pre_pats, ref inner_pat, ref post_pats) => {
pats.extend(pre_pats.iter());
pats.extend(inner_pat.iter());
pats.extend(post_pats.iter());
}
_ => {}
}
}
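The rewritten `visit_arm` above collects shorthand field ids from patterns at any nesting depth by pushing sub-patterns onto a `VecDeque` worklist instead of inspecting only a top-level struct pattern. A minimal standalone sketch of the same worklist walk, over a hypothetical simplified `Pat` enum rather than `hir::Pat`:

use std::collections::VecDeque;

// Simplified stand-in for `hir::Pat`; `StructShorthandField` models a struct
// field written in shorthand form (`x` rather than `x: x`).
enum Pat {
    StructShorthandField(u32),
    Ref(Box<Pat>),
    Tuple(Vec<Pat>),
    Other,
}

fn shorthand_ids(root: &Pat) -> Vec<u32> {
    let mut ids = Vec::new();
    let mut work: VecDeque<&Pat> = VecDeque::new();
    work.push_back(root);
    // Breadth-first walk: each sub-pattern is queued and examined exactly once.
    while let Some(pat) = work.pop_front() {
        match *pat {
            Pat::StructShorthandField(id) => ids.push(id),
            Pat::Ref(ref inner) => work.push_back(&**inner),
            Pat::Tuple(ref elems) => work.extend(elems.iter()),
            Pat::Other => {}
        }
    }
    ids
}

fn main() {
    // A nested pattern: both shorthand fields are found regardless of depth.
    let pat = Pat::Ref(Box::new(Pat::Tuple(vec![
        Pat::StructShorthandField(1),
        Pat::Other,
        Pat::Ref(Box::new(Pat::StructShorthandField(2))),
    ])));
    assert_eq!(shorthand_ids(&pat), vec![1, 2]);
}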

View File

@ -572,13 +572,13 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
Ok(ret_ty)
}
pub fn cat_expr(&self, expr: &hir::Expr) -> McResult<cmt<'tcx>> {
pub fn cat_expr(&self, expr: &hir::Expr) -> McResult<cmt_<'tcx>> {
// This recursion helper avoids going through *too many*
// adjustments, since *only* non-overloaded deref recurses.
fn helper<'a, 'gcx, 'tcx>(mc: &MemCategorizationContext<'a, 'gcx, 'tcx>,
expr: &hir::Expr,
adjustments: &[adjustment::Adjustment<'tcx>])
-> McResult<cmt<'tcx>> {
-> McResult<cmt_<'tcx>> {
match adjustments.split_last() {
None => mc.cat_expr_unadjusted(expr),
Some((adjustment, previous)) => {
@ -591,24 +591,24 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
}
pub fn cat_expr_adjusted(&self, expr: &hir::Expr,
previous: cmt<'tcx>,
previous: cmt_<'tcx>,
adjustment: &adjustment::Adjustment<'tcx>)
-> McResult<cmt<'tcx>> {
-> McResult<cmt_<'tcx>> {
self.cat_expr_adjusted_with(expr, || Ok(previous), adjustment)
}
fn cat_expr_adjusted_with<F>(&self, expr: &hir::Expr,
previous: F,
adjustment: &adjustment::Adjustment<'tcx>)
-> McResult<cmt<'tcx>>
where F: FnOnce() -> McResult<cmt<'tcx>>
-> McResult<cmt_<'tcx>>
where F: FnOnce() -> McResult<cmt_<'tcx>>
{
debug!("cat_expr_adjusted_with({:?}): {:?}", adjustment, expr);
let target = self.resolve_type_vars_if_possible(&adjustment.target);
match adjustment.kind {
adjustment::Adjust::Deref(overloaded) => {
// Equivalent to *expr or something similar.
let base = if let Some(deref) = overloaded {
let base = Rc::new(if let Some(deref) = overloaded {
let ref_ty = self.tcx.mk_ref(deref.region, ty::TypeAndMut {
ty: target,
mutbl: deref.mutbl,
@ -616,7 +616,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
self.cat_rvalue_node(expr.id, expr.span, ref_ty)
} else {
previous()?
};
});
self.cat_deref(expr, base, false)
}
@ -633,7 +633,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
}
}
pub fn cat_expr_unadjusted(&self, expr: &hir::Expr) -> McResult<cmt<'tcx>> {
pub fn cat_expr_unadjusted(&self, expr: &hir::Expr) -> McResult<cmt_<'tcx>> {
debug!("cat_expr: id={} expr={:?}", expr.id, expr);
let expr_ty = self.expr_ty(expr)?;
@ -642,13 +642,13 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
if self.tables.is_method_call(expr) {
self.cat_overloaded_place(expr, e_base, false)
} else {
let base_cmt = self.cat_expr(&e_base)?;
let base_cmt = Rc::new(self.cat_expr(&e_base)?);
self.cat_deref(expr, base_cmt, false)
}
}
hir::ExprField(ref base, f_name) => {
let base_cmt = self.cat_expr(&base)?;
let base_cmt = Rc::new(self.cat_expr(&base)?);
debug!("cat_expr(cat_field): id={} expr={:?} base={:?}",
expr.id,
expr,
@ -666,7 +666,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
// dereferencing.
self.cat_overloaded_place(expr, base, true)
} else {
let base_cmt = self.cat_expr(&base)?;
let base_cmt = Rc::new(self.cat_expr(&base)?);
self.cat_index(expr, base_cmt, expr_ty, InteriorOffsetKind::Index)
}
}
@ -701,7 +701,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
span: Span,
expr_ty: Ty<'tcx>,
def: Def)
-> McResult<cmt<'tcx>> {
-> McResult<cmt_<'tcx>> {
debug!("cat_def: id={} expr={:?} def={:?}",
id, expr_ty, def);
@ -718,14 +718,14 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
return Ok(self.cat_rvalue_node(id, span, expr_ty));
}
}
Ok(Rc::new(cmt_ {
Ok(cmt_ {
id:id,
span:span,
cat:Categorization::StaticItem,
mutbl: if mutbl { McDeclared } else { McImmutable},
ty:expr_ty,
note: NoteNone
}))
})
}
Def::Upvar(var_id, _, fn_node_id) => {
@ -733,14 +733,14 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
}
Def::Local(vid) => {
Ok(Rc::new(cmt_ {
Ok(cmt_ {
id,
span,
cat: Categorization::Local(vid),
mutbl: MutabilityCategory::from_local(self.tcx, self.tables, vid),
ty: expr_ty,
note: NoteNone
}))
})
}
def => span_bug!(span, "unexpected definition in memory categorization: {:?}", def)
@ -754,7 +754,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
span: Span,
var_id: ast::NodeId,
fn_node_id: ast::NodeId)
-> McResult<cmt<'tcx>>
-> McResult<cmt_<'tcx>>
{
let fn_hir_id = self.tcx.hir.node_to_hir_id(fn_node_id);
@ -861,7 +861,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
}
};
let ret = Rc::new(cmt_result);
let ret = cmt_result;
debug!("cat_upvar ret={:?}", ret);
Ok(ret)
}
@ -938,7 +938,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
id: ast::NodeId,
span: Span,
expr_ty: Ty<'tcx>)
-> cmt<'tcx> {
-> cmt_<'tcx> {
let hir_id = self.tcx.hir.node_to_hir_id(id);
let promotable = self.rvalue_promotable_map.as_ref().map(|m| m.contains(&hir_id.local_id))
.unwrap_or(false);
@ -966,15 +966,15 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
cmt_id: ast::NodeId,
span: Span,
temp_scope: ty::Region<'tcx>,
expr_ty: Ty<'tcx>) -> cmt<'tcx> {
let ret = Rc::new(cmt_ {
expr_ty: Ty<'tcx>) -> cmt_<'tcx> {
let ret = cmt_ {
id:cmt_id,
span:span,
cat:Categorization::Rvalue(temp_scope),
mutbl:McDeclared,
ty:expr_ty,
note: NoteNone
});
};
debug!("cat_rvalue ret {:?}", ret);
ret
}
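`cat_rvalue` (like the other constructors changed in this file) now builds a plain `cmt_` value and leaves the `Rc` wrapping to the call sites that actually need shared ownership. A minimal sketch of that by-value style, with a hypothetical `CmtData` standing in for `cmt_`:

use std::rc::Rc;

#[derive(Debug)]
struct CmtData { id: u32, note: &'static str }

// Before: a `fn cat_rvalue(..) -> Rc<CmtData>`-style constructor allocated on
// every call. After: return by value; sharing becomes the caller's decision.
fn cat_rvalue(id: u32) -> CmtData {
    CmtData { id, note: "rvalue" }
}

fn main() {
    // Most callers can keep the value on the stack...
    let local = cat_rvalue(1);
    println!("{:?}", local);
    // ...and only callers that need sharing pay for the `Rc` allocation.
    let shared: Rc<CmtData> = Rc::new(cat_rvalue(2));
    println!("{:?}", shared);
}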
@ -985,15 +985,15 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
f_index: usize,
f_name: Name,
f_ty: Ty<'tcx>)
-> cmt<'tcx> {
let ret = Rc::new(cmt_ {
-> cmt_<'tcx> {
let ret = cmt_ {
id: node.id(),
span: node.span(),
mutbl: base_cmt.mutbl.inherit(),
cat: Categorization::Interior(base_cmt, InteriorField(FieldIndex(f_index, f_name))),
ty: f_ty,
note: NoteNone
});
};
debug!("cat_field ret {:?}", ret);
ret
}
@ -1002,7 +1002,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
expr: &hir::Expr,
base: &hir::Expr,
implicit: bool)
-> McResult<cmt<'tcx>> {
-> McResult<cmt_<'tcx>> {
debug!("cat_overloaded_place: implicit={}", implicit);
// Reconstruct the output assuming it's a reference with the
@ -1022,7 +1022,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
mutbl,
});
let base_cmt = self.cat_rvalue_node(expr.id, expr.span, ref_ty);
let base_cmt = Rc::new(self.cat_rvalue_node(expr.id, expr.span, ref_ty));
self.cat_deref(expr, base_cmt, implicit)
}
@ -1030,7 +1030,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
node: &N,
base_cmt: cmt<'tcx>,
implicit: bool)
-> McResult<cmt<'tcx>> {
-> McResult<cmt_<'tcx>> {
debug!("cat_deref: base_cmt={:?}", base_cmt);
let base_cmt_ty = base_cmt.ty;
@ -1052,7 +1052,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
}
ref ty => bug!("unexpected type in cat_deref: {:?}", ty)
};
let ret = Rc::new(cmt_ {
let ret = cmt_ {
id: node.id(),
span: node.span(),
// For unique ptrs, we inherit mutability from the owning reference.
@ -1060,7 +1060,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
cat: Categorization::Deref(base_cmt, ptr),
ty: deref_ty,
note: NoteNone
});
};
debug!("cat_deref ret {:?}", ret);
Ok(ret)
}
@ -1070,7 +1070,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
base_cmt: cmt<'tcx>,
element_ty: Ty<'tcx>,
context: InteriorOffsetKind)
-> McResult<cmt<'tcx>> {
-> McResult<cmt_<'tcx>> {
//! Creates a cmt for an indexing operation (`[]`).
//!
//! One subtle aspect of indexing that may not be
@ -1089,8 +1089,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
//! - `base_cmt`: the cmt of `elt`
let interior_elem = InteriorElement(context);
let ret =
self.cat_imm_interior(elt, base_cmt, element_ty, interior_elem);
let ret = self.cat_imm_interior(elt, base_cmt, element_ty, interior_elem);
debug!("cat_index ret {:?}", ret);
return Ok(ret);
}
@ -1100,15 +1099,15 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
base_cmt: cmt<'tcx>,
interior_ty: Ty<'tcx>,
interior: InteriorKind)
-> cmt<'tcx> {
let ret = Rc::new(cmt_ {
-> cmt_<'tcx> {
let ret = cmt_ {
id: node.id(),
span: node.span(),
mutbl: base_cmt.mutbl.inherit(),
cat: Categorization::Interior(base_cmt, interior),
ty: interior_ty,
note: NoteNone
});
};
debug!("cat_imm_interior ret={:?}", ret);
ret
}
@ -1232,7 +1231,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
.get(pat.hir_id)
.map(|v| v.len())
.unwrap_or(0) {
cmt = self.cat_deref(pat, cmt, true /* implicit */)?;
cmt = Rc::new(self.cat_deref(pat, cmt, true /* implicit */)?);
}
let cmt = cmt; // lose mutability
@ -1279,7 +1278,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
let interior = InteriorField(FieldIndex(i, Name::intern(&i.to_string())));
let subcmt = self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior);
let subcmt = Rc::new(self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior));
self.cat_pattern_(subcmt, &subpat, op)?;
}
}
@ -1302,7 +1301,8 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
for fp in field_pats {
let field_ty = self.pat_ty(&fp.node.pat)?; // see (*2)
let f_index = self.tcx.field_index(fp.node.id, self.tables);
let cmt_field = self.cat_field(pat, cmt.clone(), f_index, fp.node.name, field_ty);
let cmt_field =
Rc::new(self.cat_field(pat, cmt.clone(), f_index, fp.node.name, field_ty));
self.cat_pattern_(cmt_field, &fp.node.pat, op)?;
}
}
@ -1320,7 +1320,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
for (i, subpat) in subpats.iter().enumerate_and_adjust(expected_len, ddpos) {
let subpat_ty = self.pat_ty(&subpat)?; // see (*2)
let interior = InteriorField(FieldIndex(i, Name::intern(&i.to_string())));
let subcmt = self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior);
let subcmt = Rc::new(self.cat_imm_interior(pat, cmt.clone(), subpat_ty, interior));
self.cat_pattern_(subcmt, &subpat, op)?;
}
}
@ -1329,7 +1329,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
// box p1, &p1, &mut p1. we can ignore the mutability of
// PatKind::Ref since that information is already contained
// in the type.
let subcmt = self.cat_deref(pat, cmt, false)?;
let subcmt = Rc::new(self.cat_deref(pat, cmt, false)?);
self.cat_pattern_(subcmt, &subpat, op)?;
}
@ -1342,7 +1342,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> {
}
};
let context = InteriorOffsetKind::Pattern;
let elt_cmt = self.cat_index(pat, cmt, element_ty, context)?;
let elt_cmt = Rc::new(self.cat_index(pat, cmt, element_ty, context)?);
for before_pat in before {
self.cat_pattern_(elt_cmt.clone(), &before_pat, op)?;
}
@ -1379,7 +1379,7 @@ pub enum AliasableReason {
}
impl<'tcx> cmt_<'tcx> {
pub fn guarantor(&self) -> cmt<'tcx> {
pub fn guarantor(&self) -> cmt_<'tcx> {
//! Returns `self` after stripping away any derefs or
//! interior content. The return value is basically the `cmt` which
//! determines how long the value in `self` remains live.
@ -1392,7 +1392,7 @@ impl<'tcx> cmt_<'tcx> {
Categorization::Deref(_, BorrowedPtr(..)) |
Categorization::Deref(_, Implicit(..)) |
Categorization::Upvar(..) => {
Rc::new((*self).clone())
(*self).clone()
}
Categorization::Downcast(ref b, _) |
Categorization::Interior(ref b, _) |
@ -1442,16 +1442,17 @@ impl<'tcx> cmt_<'tcx> {
}
}
// Digs down through one or two layers of deref and grabs the cmt
// for the upvar if a note indicates there is one.
pub fn upvar(&self) -> Option<cmt<'tcx>> {
// Digs down through one or two layers of deref and grabs the
// Categorization of the cmt for the upvar if a note indicates there is
// one.
pub fn upvar_cat(&self) -> Option<&Categorization<'tcx>> {
match self.note {
NoteClosureEnv(..) | NoteUpvarRef(..) => {
Some(match self.cat {
Categorization::Deref(ref inner, _) => {
match inner.cat {
Categorization::Deref(ref inner, _) => inner.clone(),
Categorization::Upvar(..) => inner.clone(),
Categorization::Deref(ref inner, _) => &inner.cat,
Categorization::Upvar(..) => &inner.cat,
_ => bug!()
}
}
@ -1462,7 +1463,6 @@ impl<'tcx> cmt_<'tcx> {
}
}
pub fn descriptive_string(&self, tcx: TyCtxt) -> String {
match self.cat {
Categorization::StaticItem => {
@ -1479,8 +1479,7 @@ impl<'tcx> cmt_<'tcx> {
}
}
Categorization::Deref(_, pk) => {
let upvar = self.upvar();
match upvar.as_ref().map(|i| &i.cat) {
match self.upvar_cat() {
Some(&Categorization::Upvar(ref var)) => {
var.to_string()
}
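`upvar_cat` above returns a borrow of the categorization already stored in the cmt instead of cloning it behind a fresh `Rc`, and `descriptive_string` matches on that borrow directly. A minimal sketch of the same borrowing shape, with hypothetical `Cat`/`CmtData` types standing in for `Categorization` and `cmt_`:

#[derive(Debug)]
enum Cat {
    Upvar(String),
    Deref(Box<CmtData>),
}

#[derive(Debug)]
struct CmtData {
    cat: Cat,
}

impl CmtData {
    // Before: an `upvar(&self) -> Option<Rc<CmtData>>`-style accessor had to
    // clone. After: borrow straight out of `self`, tied to `&self`'s lifetime.
    fn upvar_cat(&self) -> Option<&Cat> {
        match self.cat {
            Cat::Deref(ref inner) => Some(&inner.cat),
            _ => None,
        }
    }
}

fn main() {
    let cmt = CmtData {
        cat: Cat::Deref(Box::new(CmtData { cat: Cat::Upvar("x".to_string()) })),
    };
    if let Some(&Cat::Upvar(ref name)) = cmt.upvar_cat() {
        println!("captured upvar: {}", name);
    }
}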

View File

@ -690,21 +690,22 @@ impl<'tcx> ScopeTree {
// the start. So this algorithm is faster.
let mut ma = Some(scope_a);
let mut mb = Some(scope_b);
let mut seen: SmallVec<[Scope; 32]> = SmallVec::new();
let mut seen_a: SmallVec<[Scope; 32]> = SmallVec::new();
let mut seen_b: SmallVec<[Scope; 32]> = SmallVec::new();
loop {
if let Some(a) = ma {
if seen.iter().position(|s| *s == a).is_some() {
if seen_b.iter().position(|s| *s == a).is_some() {
return a;
}
seen.push(a);
seen_a.push(a);
ma = self.parent_map.get(&a).map(|s| *s);
}
if let Some(b) = mb {
if seen.iter().position(|s| *s == b).is_some() {
if seen_a.iter().position(|s| *s == b).is_some() {
return b;
}
seen.push(b);
seen_b.push(b);
mb = self.parent_map.get(&b).map(|s| *s);
}
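The hunk above splits the shared `seen` buffer into `seen_a`/`seen_b`, so each walk only terminates on a scope recorded by the other walk. A minimal standalone sketch of the resulting search over a hypothetical `u32`-keyed parent map (the real code walks `ScopeTree::parent_map` with `SmallVec` buffers):

use std::collections::HashMap;

fn nearest_common_ancestor(parent: &HashMap<u32, u32>, scope_a: u32, scope_b: u32) -> u32 {
    let (mut ma, mut mb) = (Some(scope_a), Some(scope_b));
    let (mut seen_a, mut seen_b): (Vec<u32>, Vec<u32>) = (Vec::new(), Vec::new());
    loop {
        if let Some(a) = ma {
            // `a` ends the search only if the *other* walk has already seen it.
            if seen_b.contains(&a) { return a; }
            seen_a.push(a);
            ma = parent.get(&a).cloned();
        }
        if let Some(b) = mb {
            if seen_a.contains(&b) { return b; }
            seen_b.push(b);
            mb = parent.get(&b).cloned();
        }
        if ma.is_none() && mb.is_none() {
            // The real ScopeTree is rooted, so both chains always meet; a
            // standalone sketch needs a fallback for disconnected input.
            panic!("no common ancestor");
        }
    }
}

fn main() {
    // Tree: 0 is the root, 0 -> 1 -> 2 and 0 -> 3.
    let parent: HashMap<u32, u32> = [(1, 0), (2, 1), (3, 0)].iter().cloned().collect();
    assert_eq!(nearest_common_ancestor(&parent, 2, 3), 0);
    assert_eq!(nearest_common_ancestor(&parent, 2, 1), 1);
}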

View File

@ -205,7 +205,7 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> {
} else {
// Emit errors for non-staged-api crates.
for attr in attrs {
let tag = unwrap_or!(attr.name(), continue);
let tag = attr.name();
if tag == "unstable" || tag == "stable" || tag == "rustc_deprecated" {
attr::mark_used(attr);
self.tcx.sess.span_err(attr.span(), "stability attributes may not be used \

View File

@ -68,7 +68,7 @@ fn calculate_predecessors(mir: &Mir) -> IndexVec<BasicBlock, Vec<BasicBlock>> {
let mut result = IndexVec::from_elem(vec![], mir.basic_blocks());
for (bb, data) in mir.basic_blocks().iter_enumerated() {
if let Some(ref term) = data.terminator {
for &tgt in term.successors().iter() {
for &tgt in term.successors() {
result[tgt].push(bb);
}
}
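The loop above now consumes `term.successors()` directly instead of calling `.iter()` on it. The predecessor computation itself is small enough to sketch standalone (plain `usize` indices and `Vec`s of successor lists stand in for `BasicBlock` and MIR's `IndexVec`):

fn calculate_predecessors(successors: &[Vec<usize>]) -> Vec<Vec<usize>> {
    // One (initially empty) predecessor list per basic block.
    let mut result = vec![Vec::new(); successors.len()];
    for (bb, succs) in successors.iter().enumerate() {
        // Every edge bb -> tgt makes bb a predecessor of tgt.
        for &tgt in succs {
            result[tgt].push(bb);
        }
    }
    result
}

fn main() {
    // Block 0 branches to 1 and 2; both fall through to 3.
    let succ = vec![vec![1, 2], vec![3], vec![3], vec![]];
    assert_eq!(calculate_predecessors(&succ), vec![vec![], vec![0], vec![0], vec![1, 2]]);
}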

View File

@ -1,4 +1,3 @@
use std::error::Error;
use std::{fmt, env};
use mir;
@ -8,18 +7,16 @@ use super::{
MemoryPointer, Lock, AccessKind
};
use rustc_const_math::ConstMathErr;
use syntax::codemap::Span;
use backtrace::Backtrace;
#[derive(Debug, Clone)]
pub struct EvalError<'tcx> {
pub kind: EvalErrorKind<'tcx>,
pub kind: EvalErrorKind<'tcx, u64>,
pub backtrace: Option<Backtrace>,
}
impl<'tcx> From<EvalErrorKind<'tcx>> for EvalError<'tcx> {
fn from(kind: EvalErrorKind<'tcx>) -> Self {
impl<'tcx> From<EvalErrorKind<'tcx, u64>> for EvalError<'tcx> {
fn from(kind: EvalErrorKind<'tcx, u64>) -> Self {
let backtrace = match env::var("MIRI_BACKTRACE") {
Ok(ref val) if !val.is_empty() => Some(Backtrace::new_unresolved()),
_ => None
@ -31,8 +28,10 @@ impl<'tcx> From<EvalErrorKind<'tcx>> for EvalError<'tcx> {
}
}
#[derive(Debug, Clone)]
pub enum EvalErrorKind<'tcx> {
pub type AssertMessage<'tcx> = EvalErrorKind<'tcx, mir::Operand<'tcx>>;
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub enum EvalErrorKind<'tcx, O> {
/// This variant is used by machines to signal their own errors that do not
/// match an existing variant
MachineError(String),
@ -60,10 +59,12 @@ pub enum EvalErrorKind<'tcx> {
Unimplemented(String),
DerefFunctionPointer,
ExecuteMemory,
ArrayIndexOutOfBounds(Span, u64, u64),
Math(Span, ConstMathErr),
BoundsCheck { len: O, index: O },
Overflow(mir::BinOp),
OverflowNeg,
DivisionByZero,
RemainderByZero,
Intrinsic(String),
OverflowingMath,
InvalidChar(u128),
StackFrameLimitReached,
OutOfTls,
@ -121,14 +122,16 @@ pub enum EvalErrorKind<'tcx> {
/// Cannot compute this constant because it depends on another one
/// which already produced an error
ReferencedConstant,
GeneratorResumedAfterReturn,
GeneratorResumedAfterPanic,
}
pub type EvalResult<'tcx, T = ()> = Result<T, EvalError<'tcx>>;
impl<'tcx> Error for EvalError<'tcx> {
fn description(&self) -> &str {
impl<'tcx, O> EvalErrorKind<'tcx, O> {
pub fn description(&self) -> &str {
use self::EvalErrorKind::*;
match self.kind {
match *self {
MachineError(ref inner) => inner,
FunctionPointerTyMismatch(..) =>
"tried to call a function through a function pointer of a different type",
@ -175,14 +178,10 @@ impl<'tcx> Error for EvalError<'tcx> {
"tried to dereference a function pointer",
ExecuteMemory =>
"tried to treat a memory pointer as a function pointer",
ArrayIndexOutOfBounds(..) =>
BoundsCheck{..} =>
"array index out of bounds",
Math(..) =>
"mathematical operation failed",
Intrinsic(..) =>
"intrinsic failed",
OverflowingMath =>
"attempted to do overflowing math",
NoMirFor(..) =>
"mir not found",
InvalidChar(..) =>
@ -232,7 +231,7 @@ impl<'tcx> Error for EvalError<'tcx> {
"the evaluated program panicked",
ReadFromReturnPointer =>
"tried to read from the return pointer",
EvalErrorKind::PathNotFound(_) =>
PathNotFound(_) =>
"a path could not be resolved, maybe the crate is not loaded",
UnimplementedTraitSelection =>
"there were unresolved type arguments during trait selection",
@ -240,14 +239,33 @@ impl<'tcx> Error for EvalError<'tcx> {
"encountered constants with type errors, stopping evaluation",
ReferencedConstant =>
"referenced constant has errors",
Overflow(mir::BinOp::Add) => "attempt to add with overflow",
Overflow(mir::BinOp::Sub) => "attempt to subtract with overflow",
Overflow(mir::BinOp::Mul) => "attempt to multiply with overflow",
Overflow(mir::BinOp::Div) => "attempt to divide with overflow",
Overflow(mir::BinOp::Rem) => "attempt to calculate the remainder with overflow",
OverflowNeg => "attempt to negate with overflow",
Overflow(mir::BinOp::Shr) => "attempt to shift right with overflow",
Overflow(mir::BinOp::Shl) => "attempt to shift left with overflow",
Overflow(op) => bug!("{:?} cannot overflow", op),
DivisionByZero => "attempt to divide by zero",
RemainderByZero => "attempt to calculate the remainder with a divisor of zero",
GeneratorResumedAfterReturn => "generator resumed after completion",
GeneratorResumedAfterPanic => "generator resumed after panicking",
}
}
}
impl<'tcx> fmt::Display for EvalError<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:?}", self.kind)
}
}
impl<'tcx, O: fmt::Debug> fmt::Debug for EvalErrorKind<'tcx, O> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::EvalErrorKind::*;
match self.kind {
match *self {
PointerOutOfBounds { ptr, access, allocation_size } => {
write!(f, "{} at offset {}, outside bounds of allocation {} which has size {}",
if access { "memory access" } else { "pointer computed" },
@ -275,14 +293,12 @@ impl<'tcx> fmt::Display for EvalError<'tcx> {
NoMirFor(ref func) => write!(f, "no mir for `{}`", func),
FunctionPointerTyMismatch(sig, got) =>
write!(f, "tried to call a function with sig {} through a function pointer of type {}", sig, got),
ArrayIndexOutOfBounds(span, len, index) =>
write!(f, "index out of bounds: the len is {} but the index is {} at {:?}", len, index, span),
BoundsCheck { ref len, ref index } =>
write!(f, "index out of bounds: the len is {:?} but the index is {:?}", len, index),
ReallocatedWrongMemoryKind(ref old, ref new) =>
write!(f, "tried to reallocate memory from {} to {}", old, new),
DeallocatedWrongMemoryKind(ref old, ref new) =>
write!(f, "tried to deallocate {} memory but gave {} as the kind", old, new),
Math(_, ref err) =>
write!(f, "{}", err.description()),
Intrinsic(ref err) =>
write!(f, "{}", err),
InvalidChar(c) =>

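The broader change in this file makes `EvalErrorKind` generic over an operand type `O`, so `BoundsCheck { len, index }` can hold either concrete `u64` values (as in `EvalError`) or unevaluated MIR operands (as in the new `AssertMessage` alias). A stripped-down sketch of that shape, with a hypothetical `Operand` stand-in for `mir::Operand`:

#[derive(Debug)]
enum ErrorKind<O> {
    BoundsCheck { len: O, index: O },
    DivisionByZero,
}

#[derive(Debug)]
struct Operand(String); // stand-in for a still-symbolic MIR operand

// One enum serves both roles via aliases, mirroring
// `AssertMessage<'tcx> = EvalErrorKind<'tcx, mir::Operand<'tcx>>` above.
type AssertMessage = ErrorKind<Operand>;
type ConcreteErrorKind = ErrorKind<u64>;

impl<O> ErrorKind<O> {
    // `description` needs no knowledge of `O`, like the impl above.
    fn description(&self) -> &str {
        match *self {
            ErrorKind::BoundsCheck { .. } => "array index out of bounds",
            ErrorKind::DivisionByZero => "attempt to divide by zero",
        }
    }
}

fn main() {
    let symbolic: AssertMessage = ErrorKind::BoundsCheck {
        len: Operand("len(a)".to_string()),
        index: Operand("i".to_string()),
    };
    let concrete: ConcreteErrorKind = ErrorKind::BoundsCheck { len: 3, index: 7 };
    println!("{}: {:?}", symbolic.description(), symbolic);
    println!("{}: {:?}", concrete.description(), concrete);
}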
Some files were not shown because too many files have changed in this diff.