Auto merge of #60683 - Centril:rollup-p05qh5d, r=Centril

Rollup of 8 pull requests

Successful merges:

 - #59348 (Clean up and add tests for slice drop shims)
 - #60188 (Identify when a stmt could have been parsed as an expr)
 - #60234 (std: Derive `Default` for `io::Cursor`)
 - #60618 (Comment ext::tt::transcribe)
 - #60648 (Skip codegen for one UI test with long file path)
 - #60671 (remove unneeded `extern crate`s from build tools)
 - #60675 (Remove the old await! macro)
 - #60676 (Fix async desugaring providing wrong input to procedural macros.)

Failed merges:

r? @ghost

commit 03bd2f653f

Cargo.lock | 76
@ -29,7 +29,7 @@ version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"html5ever 0.22.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tendril 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -170,13 +170,12 @@ dependencies = [
|
||||
"cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"filetime 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"getopts 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"petgraph 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -192,7 +191,6 @@ name = "build-manifest"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@ -266,7 +264,7 @@ dependencies = [
|
||||
"ignore 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"im-rc 12.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -374,7 +372,7 @@ dependencies = [
|
||||
"clippy_lints 0.0.212",
|
||||
"compiletest_rs 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-workspace-hack 1.0.0",
|
||||
"rustc_tools_util 0.1.1",
|
||||
@ -394,7 +392,7 @@ dependencies = [
|
||||
"cargo_metadata 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"if_chain 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pulldown-cmark 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -465,14 +463,13 @@ dependencies = [
|
||||
"env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"filetime 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"getopts 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"miow 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustfix 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -592,7 +589,7 @@ dependencies = [
|
||||
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -606,7 +603,7 @@ dependencies = [
|
||||
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@ -751,7 +748,7 @@ name = "elasticlunr-rs"
|
||||
version = "2.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1032,7 +1029,7 @@ name = "handlebars"
|
||||
version = "0.32.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pest 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pest_derive 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1047,7 +1044,7 @@ name = "handlebars"
|
||||
version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1143,7 +1140,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"crossbeam-channel 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"globset 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1169,7 +1166,7 @@ dependencies = [
|
||||
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"flate2 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tar 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1265,7 +1262,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "1.2.0"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
@ -1354,7 +1351,7 @@ name = "log_settings"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1441,7 +1438,7 @@ dependencies = [
|
||||
"error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"handlebars 0.32.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"open 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1469,7 +1466,7 @@ dependencies = [
|
||||
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"handlebars 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"open 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1703,7 +1700,7 @@ dependencies = [
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"openssl-sys 0.9.43 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@ -1971,7 +1968,7 @@ dependencies = [
|
||||
"bit-set 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2045,7 +2042,7 @@ dependencies = [
|
||||
"derive_more 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"humantime 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rls-span 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-ap-syntax 407.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2141,7 +2138,7 @@ version = "1.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2243,7 +2240,7 @@ dependencies = [
|
||||
"home 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"jsonrpc-core 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lsp-codec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lsp-types 0.57.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2349,7 +2346,7 @@ dependencies = [
|
||||
"fmt_macros 0.0.0",
|
||||
"graphviz 0.0.0",
|
||||
"jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"measureme 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2402,7 +2399,7 @@ dependencies = [
|
||||
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ena 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-ap-graphviz 407.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -2532,7 +2529,7 @@ version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@ -2543,7 +2540,7 @@ version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@ -2702,7 +2699,7 @@ dependencies = [
|
||||
"ena 0.13.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"graphviz 0.0.0",
|
||||
"jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -3097,7 +3094,7 @@ dependencies = [
|
||||
"getopts 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ignore 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-ap-rustc_target 407.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -3143,7 +3140,7 @@ name = "schannel"
|
||||
version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@ -3313,7 +3310,7 @@ name = "string_cache"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"new_debug_unreachable 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"phf_shared 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"precomputed-hash 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -3404,7 +3401,7 @@ name = "syntax"
|
||||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc_data_structures 0.0.0",
|
||||
"rustc_errors 0.0.0",
|
||||
@ -3549,7 +3546,7 @@ name = "thread_local"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -3558,7 +3555,6 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_derive 1.0.81 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@ -3664,7 +3660,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"mio 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -3783,7 +3779,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"is-match 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@ -3795,7 +3791,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"is-match 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"toml 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
@ -4160,7 +4156,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
"checksum jsonrpc-core 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a5152c3fda235dfd68341b3edf4121bc4428642c93acbd6de88c26bf95fc5d7"
|
||||
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
||||
"checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73"
|
||||
"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
|
||||
"checksum lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bc5729f27f159ddd61f4df6228e827e86643d4d3e7c32183cb30a1c08f604a14"
|
||||
"checksum lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b294d6fa9ee409a054354afc4352b0b9ef7ca222c69b8812cbea9e7d2bf3783f"
|
||||
"checksum libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)" = "c6785aa7dd976f5fbf3b71cfd9cd49d7f783c1ff565a858d71031c6c313aa5c6"
|
||||
"checksum libgit2-sys 0.7.11 (registry+https://github.com/rust-lang/crates.io-index)" = "48441cb35dc255da8ae72825689a95368bf510659ae1ad55dc4aa88cb1789bf1"
|
||||
|
@@ -42,11 +42,10 @@ num_cpus = "1.0"
 getopts = "0.2.19"
 cc = "1.0.35"
 libc = "0.2"
-serde = "1.0.8"
-serde_derive = "1.0.8"
+serde = { version = "1.0.8", features = ["derive"] }
 serde_json = "1.0.2"
 toml = "0.4"
-lazy_static = "0.2"
+lazy_static = "1.3.0"
 time = "0.1"
 petgraph = "0.4.13"

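The Cargo.toml hunk above (part of #60671) drops the separate `serde_derive` dependency in favor of serde's `derive` feature and bumps `lazy_static`. A minimal sketch of what that looks like on the code side, assuming a crate that previously relied on `#[macro_use] extern crate serde_derive;` (the `ToolState` struct and the `serde_json` call here are illustrative, not taken from the diff):

```rust
// With `serde = { version = "1.0", features = ["derive"] }` in Cargo.toml,
// the derive macros are imported explicitly instead of via #[macro_use].
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Debug)]
struct ToolState {
    name: String,
    passing: bool,
}

fn main() {
    let t = ToolState { name: "clippy".into(), passing: true };
    // serde_json is assumed here purely for demonstration.
    let json = serde_json::to_string(&t).unwrap();
    let back: ToolState = serde_json::from_str(&json).unwrap();
    assert_eq!(t.passing, back.passing);
}
```
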
@@ -11,6 +11,8 @@ use std::path::{Path, PathBuf};
 use std::process::Command;
 use std::time::{Duration, Instant};
 
+use build_helper::t;
+
 use crate::cache::{Cache, Interned, INTERNER};
 use crate::check;
 use crate::compile;
@@ -1308,6 +1310,8 @@ mod __test {
     use crate::config::Config;
     use std::thread;
 
+    use pretty_assertions::assert_eq;
+
     fn configure(host: &[&str], target: &[&str]) -> Config {
         let mut config = Config::default_opts();
         // don't save toolstates
@@ -13,6 +13,8 @@ use std::path::{Path, PathBuf};
 use std::sync::Mutex;
 use std::cmp::{PartialOrd, Ord, Ordering};
 
+use lazy_static::lazy_static;
+
 use crate::builder::Step;
 
 pub struct Interned<T>(usize, PhantomData<*const T>);
@@ -9,6 +9,8 @@ use std::fs;
 use std::io::{self, ErrorKind};
 use std::path::Path;
 
+use build_helper::t;
+
 use crate::Build;
 
 pub fn clean(build: &Build, all: bool) {
@@ -15,8 +15,9 @@ use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio, exit};
 use std::str;
 
-use build_helper::{output, mtime, up_to_date};
+use build_helper::{output, mtime, t, up_to_date};
 use filetime::FileTime;
+use serde::Deserialize;
 use serde_json;
 
 use crate::dist;
@@ -10,8 +10,10 @@ use std::path::{Path, PathBuf};
 use std::process;
 use std::cmp;
 
+use build_helper::t;
 use num_cpus;
 use toml;
+use serde::Deserialize;
 use crate::cache::{INTERNER, Interned};
 use crate::flags::Flags;
 pub use crate::flags::Subcommand;
@@ -14,7 +14,7 @@ use std::io::Write;
 use std::path::{PathBuf, Path};
 use std::process::{Command, Stdio};
 
-use build_helper::output;
+use build_helper::{output, t};
 
 use crate::{Compiler, Mode, LLVM_TOOLS};
 use crate::channel;
@@ -13,7 +13,7 @@ use std::io;
 use std::path::{PathBuf, Path};
 
 use crate::Mode;
-use build_helper::up_to_date;
+use build_helper::{t, up_to_date};
 
 use crate::util::symlink_dir;
 use crate::builder::{Builder, Compiler, RunConfig, ShouldRun, Step};
@@ -8,6 +8,8 @@ use std::fs;
 use std::path::{Path, PathBuf, Component};
 use std::process::Command;
 
+use build_helper::t;
+
 use crate::dist::{self, pkgname, sanitize_sh, tmpdir};
 
 use crate::builder::{Builder, RunConfig, ShouldRun, Step};
@@ -108,17 +108,6 @@
 #![feature(core_intrinsics)]
 #![feature(drain_filter)]
 
-#[macro_use]
-extern crate build_helper;
-#[macro_use]
-extern crate serde_derive;
-#[macro_use]
-extern crate lazy_static;
-
-#[cfg(test)]
-#[macro_use]
-extern crate pretty_assertions;
-
 use std::cell::{RefCell, Cell};
 use std::collections::{HashSet, HashMap};
 use std::env;
@@ -134,7 +123,9 @@ use std::os::unix::fs::symlink as symlink_file;
 #[cfg(windows)]
 use std::os::windows::fs::symlink_file;
 
-use build_helper::{run_silent, run_suppressed, try_run_silent, try_run_suppressed, output, mtime};
+use build_helper::{
+    mtime, output, run_silent, run_suppressed, t, try_run_silent, try_run_suppressed,
+};
 use filetime::FileTime;
 
 use crate::util::{exe, libdir, OutputFolder, CiEnv};
@@ -4,6 +4,7 @@ use std::path::PathBuf;
 use std::collections::HashSet;
 
 use build_helper::output;
+use serde::Deserialize;
 use serde_json;
 
 use crate::{Build, Crate};
@@ -14,7 +14,7 @@ use std::fs::{self, File};
 use std::path::{Path, PathBuf};
 use std::process::Command;
 
-use build_helper::output;
+use build_helper::{output, t};
 use cmake;
 use cc;
 
@@ -15,7 +15,7 @@ use std::fs;
 use std::path::PathBuf;
 use std::process::Command;
 
-use build_helper::output;
+use build_helper::{output, t};
 
 use crate::Build;
 
@@ -11,7 +11,7 @@ use std::iter;
 use std::path::{Path, PathBuf};
 use std::process::Command;
 
-use build_helper::{self, output};
+use build_helper::{self, output, t};
 
 use crate::builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step};
 use crate::cache::{Interned, INTERNER};
@@ -4,6 +4,8 @@ use std::path::PathBuf;
 use std::process::{Command, exit};
 use std::collections::HashSet;
 
+use build_helper::t;
+
 use crate::Mode;
 use crate::Compiler;
 use crate::builder::{Step, RunConfig, ShouldRun, Builder};
@@ -1,3 +1,5 @@
+use serde::{Deserialize, Serialize};
+
 #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
 #[serde(rename_all = "kebab-case")]
 /// Whether a tool can be compiled, tested or neither
@@ -11,6 +11,8 @@ use std::path::{Path, PathBuf};
 use std::process::Command;
 use std::time::{SystemTime, Instant};
 
+use build_helper::t;
+
 use crate::config::Config;
 use crate::builder::Builder;
 
@ -50,7 +50,6 @@ use errors::Applicability;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_data_structures::indexed_vec::IndexVec;
|
||||
use rustc_data_structures::thin_vec::ThinVec;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
|
||||
use std::collections::{BTreeSet, BTreeMap};
|
||||
use std::mem;
|
||||
@ -59,10 +58,10 @@ use syntax::attr;
|
||||
use syntax::ast;
|
||||
use syntax::ast::*;
|
||||
use syntax::errors;
|
||||
use syntax::ext::hygiene::{Mark, SyntaxContext};
|
||||
use syntax::ext::hygiene::Mark;
|
||||
use syntax::print::pprust;
|
||||
use syntax::ptr::P;
|
||||
use syntax::source_map::{self, respan, CompilerDesugaringKind, Spanned};
|
||||
use syntax::source_map::{respan, CompilerDesugaringKind, Spanned};
|
||||
use syntax::std_inject;
|
||||
use syntax::symbol::{keywords, Symbol};
|
||||
use syntax::tokenstream::{TokenStream, TokenTree};
|
||||
@ -854,27 +853,6 @@ impl<'a> LoweringContext<'a> {
|
||||
Ident::with_empty_ctxt(Symbol::gensym(s))
|
||||
}
|
||||
|
||||
/// Reuses the span but adds information like the kind of the desugaring and features that are
|
||||
/// allowed inside this span.
|
||||
fn mark_span_with_reason(
|
||||
&self,
|
||||
reason: CompilerDesugaringKind,
|
||||
span: Span,
|
||||
allow_internal_unstable: Option<Lrc<[Symbol]>>,
|
||||
) -> Span {
|
||||
let mark = Mark::fresh(Mark::root());
|
||||
mark.set_expn_info(source_map::ExpnInfo {
|
||||
call_site: span,
|
||||
def_site: Some(span),
|
||||
format: source_map::CompilerDesugaring(reason),
|
||||
allow_internal_unstable,
|
||||
allow_internal_unsafe: false,
|
||||
local_inner_macros: false,
|
||||
edition: source_map::hygiene::default_edition(),
|
||||
});
|
||||
span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
|
||||
}
|
||||
|
||||
fn with_anonymous_lifetime_mode<R>(
|
||||
&mut self,
|
||||
anonymous_lifetime_mode: AnonymousLifetimeMode,
|
||||
@ -1162,7 +1140,7 @@ impl<'a> LoweringContext<'a> {
|
||||
attrs: ThinVec::new(),
|
||||
};
|
||||
|
||||
let unstable_span = self.mark_span_with_reason(
|
||||
let unstable_span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::Async,
|
||||
span,
|
||||
Some(vec![
|
||||
@ -1569,7 +1547,7 @@ impl<'a> LoweringContext<'a> {
|
||||
// desugaring that explicitly states that we don't want to track that.
|
||||
// Not tracking it makes lints in rustc and clippy very fragile as
|
||||
// frequently opened issues show.
|
||||
let exist_ty_span = self.mark_span_with_reason(
|
||||
let exist_ty_span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::ExistentialReturnType,
|
||||
span,
|
||||
None,
|
||||
@ -2443,7 +2421,7 @@ impl<'a> LoweringContext<'a> {
|
||||
) -> hir::FunctionRetTy {
|
||||
let span = output.span();
|
||||
|
||||
let exist_ty_span = self.mark_span_with_reason(
|
||||
let exist_ty_span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::Async,
|
||||
span,
|
||||
None,
|
||||
@ -4179,7 +4157,7 @@ impl<'a> LoweringContext<'a> {
|
||||
}),
|
||||
ExprKind::TryBlock(ref body) => {
|
||||
self.with_catch_scope(body.id, |this| {
|
||||
let unstable_span = this.mark_span_with_reason(
|
||||
let unstable_span = this.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::TryBlock,
|
||||
body.span,
|
||||
Some(vec![
|
||||
@ -4612,7 +4590,7 @@ impl<'a> LoweringContext<'a> {
|
||||
// expand <head>
|
||||
let mut head = self.lower_expr(head);
|
||||
let head_sp = head.span;
|
||||
let desugared_span = self.mark_span_with_reason(
|
||||
let desugared_span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::ForLoop,
|
||||
head_sp,
|
||||
None,
|
||||
@ -4773,7 +4751,7 @@ impl<'a> LoweringContext<'a> {
|
||||
// return Try::from_error(From::from(err)),
|
||||
// }
|
||||
|
||||
let unstable_span = self.mark_span_with_reason(
|
||||
let unstable_span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::QuestionMark,
|
||||
e.span,
|
||||
Some(vec![
|
||||
@ -4781,7 +4759,7 @@ impl<'a> LoweringContext<'a> {
|
||||
].into()),
|
||||
);
|
||||
let try_span = self.sess.source_map().end_point(e.span);
|
||||
let try_span = self.mark_span_with_reason(
|
||||
let try_span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::QuestionMark,
|
||||
try_span,
|
||||
Some(vec![
|
||||
@ -5566,12 +5544,12 @@ impl<'a> LoweringContext<'a> {
|
||||
);
|
||||
self.sess.abort_if_errors();
|
||||
}
|
||||
let span = self.mark_span_with_reason(
|
||||
let span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::Await,
|
||||
await_span,
|
||||
None,
|
||||
);
|
||||
let gen_future_span = self.mark_span_with_reason(
|
||||
let gen_future_span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::Await,
|
||||
await_span,
|
||||
Some(vec![Symbol::intern("gen_future")].into()),
|
||||
|
@ -10,7 +10,7 @@ use rustc::ty::util::IntTypeExt;
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
use crate::util::patch::MirPatch;
|
||||
|
||||
use std::u32;
|
||||
use std::convert::TryInto;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
|
||||
pub enum DropFlagState {
|
||||
@ -545,10 +545,9 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
self.elaborator.patch().new_block(result)
|
||||
}
|
||||
|
||||
/// create a loop that drops an array:
|
||||
///
|
||||
|
||||
/// Create a loop that drops an array:
|
||||
///
|
||||
/// ```text
|
||||
/// loop-block:
|
||||
/// can_go = cur == length_or_end
|
||||
/// if can_go then succ else drop-block
|
||||
@ -561,15 +560,16 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
/// cur = cur + 1
|
||||
/// }
|
||||
/// drop(ptr)
|
||||
fn drop_loop(&mut self,
|
||||
succ: BasicBlock,
|
||||
cur: Local,
|
||||
length_or_end: &Place<'tcx>,
|
||||
ety: Ty<'tcx>,
|
||||
unwind: Unwind,
|
||||
ptr_based: bool)
|
||||
-> BasicBlock
|
||||
{
|
||||
/// ```
|
||||
fn drop_loop(
|
||||
&mut self,
|
||||
succ: BasicBlock,
|
||||
cur: Local,
|
||||
length_or_end: &Place<'tcx>,
|
||||
ety: Ty<'tcx>,
|
||||
unwind: Unwind,
|
||||
ptr_based: bool,
|
||||
) -> BasicBlock {
|
||||
let copy = |place: &Place<'tcx>| Operand::Copy(place.clone());
|
||||
let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
|
||||
let tcx = self.tcx();
|
||||
@ -591,13 +591,13 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
elem: ProjectionElem::Deref,
|
||||
}))
|
||||
),
|
||||
Rvalue::BinaryOp(BinOp::Offset, copy(&Place::Base(PlaceBase::Local(cur))), one))
|
||||
Rvalue::BinaryOp(BinOp::Offset, move_(&Place::Base(PlaceBase::Local(cur))), one))
|
||||
} else {
|
||||
(Rvalue::Ref(
|
||||
tcx.lifetimes.re_erased,
|
||||
BorrowKind::Mut { allow_two_phase_borrow: false },
|
||||
self.place.clone().index(cur)),
|
||||
Rvalue::BinaryOp(BinOp::Add, copy(&Place::Base(PlaceBase::Local(cur))), one))
|
||||
Rvalue::BinaryOp(BinOp::Add, move_(&Place::Base(PlaceBase::Local(cur))), one))
|
||||
};
|
||||
|
||||
let drop_block = BasicBlockData {
|
||||
@ -647,9 +647,9 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
// }
|
||||
|
||||
if let Some(size) = opt_size {
|
||||
assert!(size <= (u32::MAX as u64),
|
||||
"move out check doesn't implemented for array bigger then u32");
|
||||
let size = size as u32;
|
||||
let size: u32 = size.try_into().unwrap_or_else(|_| {
|
||||
bug!("move out check isn't implemented for array sizes bigger than u32::MAX");
|
||||
});
|
||||
let fields: Vec<(Place<'tcx>, Option<D::Path>)> = (0..size).map(|i| {
|
||||
(self.place.clone().elem(ProjectionElem::ConstantIndex{
|
||||
offset: i,
|
||||
@ -667,33 +667,42 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
|
||||
let move_ = |place: &Place<'tcx>| Operand::Move(place.clone());
|
||||
let tcx = self.tcx();
|
||||
let size = &Place::Base(PlaceBase::Local(self.new_temp(tcx.types.usize)));
|
||||
let size_is_zero = &Place::Base(PlaceBase::Local(self.new_temp(tcx.types.bool)));
|
||||
let elem_size = &Place::Base(PlaceBase::Local(self.new_temp(tcx.types.usize)));
|
||||
let len = &Place::Base(PlaceBase::Local(self.new_temp(tcx.types.usize)));
|
||||
|
||||
static USIZE_SWITCH_ZERO: &[u128] = &[0];
|
||||
|
||||
let base_block = BasicBlockData {
|
||||
statements: vec![
|
||||
self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
|
||||
self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
|
||||
move_(size),
|
||||
self.constant_usize(0)))
|
||||
self.assign(elem_size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
|
||||
self.assign(len, Rvalue::Len(self.place.clone())),
|
||||
],
|
||||
is_cleanup: self.unwind.is_cleanup(),
|
||||
terminator: Some(Terminator {
|
||||
source_info: self.source_info,
|
||||
kind: TerminatorKind::if_(
|
||||
tcx,
|
||||
move_(size_is_zero),
|
||||
self.drop_loop_pair(ety, false),
|
||||
self.drop_loop_pair(ety, true)
|
||||
)
|
||||
kind: TerminatorKind::SwitchInt {
|
||||
discr: move_(elem_size),
|
||||
switch_ty: tcx.types.usize,
|
||||
values: From::from(USIZE_SWITCH_ZERO),
|
||||
targets: vec![
|
||||
self.drop_loop_pair(ety, false, len.clone()),
|
||||
self.drop_loop_pair(ety, true, len.clone()),
|
||||
],
|
||||
},
|
||||
})
|
||||
};
|
||||
self.elaborator.patch().new_block(base_block)
|
||||
}
|
||||
|
||||
// create a pair of drop-loops of `place`, which drops its contents
|
||||
// even in the case of 1 panic. If `ptr_based`, create a pointer loop,
|
||||
// otherwise create an index loop.
|
||||
fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
|
||||
/// Creates a pair of drop-loops of `place`, which drops its contents, even
|
||||
/// in the case of 1 panic. If `ptr_based`, creates a pointer loop,
|
||||
/// otherwise create an index loop.
|
||||
fn drop_loop_pair(
|
||||
&mut self,
|
||||
ety: Ty<'tcx>,
|
||||
ptr_based: bool,
|
||||
length: Place<'tcx>,
|
||||
) -> BasicBlock {
|
||||
debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
|
||||
let tcx = self.tcx();
|
||||
let iter_ty = if ptr_based {
|
||||
@ -703,7 +712,6 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
};
|
||||
|
||||
let cur = self.new_temp(iter_ty);
|
||||
let length = Place::Base(PlaceBase::Local(self.new_temp(tcx.types.usize)));
|
||||
let length_or_end = if ptr_based {
|
||||
// FIXME check if we want to make it return a `Place` directly
|
||||
// if all use sites want a `Place::Base` anyway.
|
||||
@ -722,9 +730,8 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
ptr_based)
|
||||
});
|
||||
|
||||
let succ = self.succ; // FIXME(#43234)
|
||||
let loop_block = self.drop_loop(
|
||||
succ,
|
||||
self.succ,
|
||||
cur,
|
||||
&length_or_end,
|
||||
ety,
|
||||
@ -732,31 +739,32 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
ptr_based);
|
||||
|
||||
let cur = Place::Base(PlaceBase::Local(cur));
|
||||
let zero = self.constant_usize(0);
|
||||
let mut drop_block_stmts = vec![];
|
||||
drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.place.clone())));
|
||||
if ptr_based {
|
||||
let drop_block_stmts = if ptr_based {
|
||||
let tmp_ty = tcx.mk_mut_ptr(self.place_ty(self.place));
|
||||
let tmp = Place::Base(PlaceBase::Local(self.new_temp(tmp_ty)));
|
||||
// tmp = &mut P;
|
||||
// cur = tmp as *mut T;
|
||||
// end = Offset(cur, len);
|
||||
drop_block_stmts.push(self.assign(&tmp, Rvalue::Ref(
|
||||
tcx.lifetimes.re_erased,
|
||||
BorrowKind::Mut { allow_two_phase_borrow: false },
|
||||
self.place.clone()
|
||||
)));
|
||||
drop_block_stmts.push(self.assign(&cur, Rvalue::Cast(
|
||||
CastKind::Misc, Operand::Move(tmp), iter_ty
|
||||
)));
|
||||
drop_block_stmts.push(self.assign(&length_or_end,
|
||||
Rvalue::BinaryOp(BinOp::Offset,
|
||||
Operand::Copy(cur), Operand::Move(length)
|
||||
)));
|
||||
vec![
|
||||
self.assign(&tmp, Rvalue::Ref(
|
||||
tcx.lifetimes.re_erased,
|
||||
BorrowKind::Mut { allow_two_phase_borrow: false },
|
||||
self.place.clone()
|
||||
)),
|
||||
self.assign(
|
||||
&cur,
|
||||
Rvalue::Cast(CastKind::Misc, Operand::Move(tmp), iter_ty),
|
||||
),
|
||||
self.assign(
|
||||
&length_or_end,
|
||||
Rvalue::BinaryOp(BinOp::Offset, Operand::Copy(cur), Operand::Move(length)
|
||||
)),
|
||||
]
|
||||
} else {
|
||||
// index = 0 (length already pushed)
|
||||
drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
|
||||
}
|
||||
// cur = 0 (length already pushed)
|
||||
let zero = self.constant_usize(0);
|
||||
vec![self.assign(&cur, Rvalue::Use(zero))]
|
||||
};
|
||||
let drop_block = self.elaborator.patch().new_block(BasicBlockData {
|
||||
statements: drop_block_stmts,
|
||||
is_cleanup: unwind.is_cleanup(),
|
||||
@ -768,7 +776,7 @@ impl<'l, 'b, 'tcx, D> DropCtxt<'l, 'b, 'tcx, D>
|
||||
|
||||
// FIXME(#34708): handle partially-dropped array/slice elements.
|
||||
let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
|
||||
self.drop_flag_test_block(reset_block, succ, unwind)
|
||||
self.drop_flag_test_block(reset_block, self.succ, unwind)
|
||||
}
|
||||
|
||||
/// The slow-path - create an "open", elaborated drop for a type
|
||||
|
@@ -4168,9 +4168,25 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
                     oprnd_t = self.make_overloaded_place_return_type(method).ty;
                     self.write_method_call(expr.hir_id, method);
                 } else {
-                    type_error_struct!(tcx.sess, expr.span, oprnd_t, E0614,
-                                       "type `{}` cannot be dereferenced",
-                                       oprnd_t).emit();
+                    let mut err = type_error_struct!(
+                        tcx.sess,
+                        expr.span,
+                        oprnd_t,
+                        E0614,
+                        "type `{}` cannot be dereferenced",
+                        oprnd_t,
+                    );
+                    let sp = tcx.sess.source_map().start_point(expr.span);
+                    if let Some(sp) = tcx.sess.parse_sess.ambiguous_block_expr_parse
+                        .borrow().get(&sp)
+                    {
+                        tcx.sess.parse_sess.expr_parentheses_needed(
+                            &mut err,
+                            *sp,
+                            None,
+                        );
+                    }
+                    err.emit();
                     oprnd_t = tcx.types.err;
                 }
             }
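The hunk above (part of #60188) wires the new `ambiguous_block_expr_parse` table into the E0614 "cannot be dereferenced" error so the compiler can suggest parentheses. A minimal sketch of the situation it targets (the snippet is illustrative; the suggestion wording is not taken from this diff):

```rust
fn main() {
    // In statement position, `{ 1 }` is parsed as a block statement, so the
    // trailing `* 2` becomes a unary dereference of `2` and fails with E0614.
    // { 1 } * 2;

    // Parenthesizing forces the whole thing to parse as one expression.
    let _ = ({ 1 }) * 2;
}
```
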
@@ -72,7 +72,7 @@ use core::convert::TryInto;
 /// }
 /// ```
 #[stable(feature = "rust1", since = "1.0.0")]
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
 pub struct Cursor<T> {
     inner: T,
     pos: u64,
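The hunk above is #60234: deriving `Default` means `Cursor<T>` is `Default` whenever `T: Default`. A small usage sketch, not taken from the diff itself:

```rust
use std::io::{Cursor, Write};

fn main() -> std::io::Result<()> {
    // Equivalent to Cursor::new(Vec::new()): empty buffer, position 0.
    let mut cur: Cursor<Vec<u8>> = Cursor::default();
    cur.write_all(b"hello")?;
    assert_eq!(cur.position(), 5);
    assert_eq!(cur.into_inner(), b"hello".to_vec());
    Ok(())
}
```
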
@@ -357,29 +357,6 @@ macro_rules! dbg {
     };
 }
 
-/// Awaits the completion of an async call.
-#[macro_export]
-#[unstable(feature = "await_macro", issue = "50547")]
-#[allow_internal_unstable(gen_future, generators)]
-#[allow_internal_unsafe]
-macro_rules! r#await {
-    ($e:expr) => { {
-        let mut pinned = $e;
-        loop {
-            if let $crate::task::Poll::Ready(x) =
-                $crate::future::poll_with_tls_context(unsafe {
-                    $crate::pin::Pin::new_unchecked(&mut pinned)
-                })
-            {
-                break x;
-            }
-            // FIXME(cramertj) prior to stabilizing await, we have to ensure that this
-            // can't be used to create a generator on stable via `|| await!()`.
-            yield
-        }
-    } }
-}
-
 /// Selects the first successful receive event from a number of receivers.
 ///
 /// This macro is used to wait for the first event to occur on a number of
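The removed block above is #60675, deleting the old `await!` macro. Code that used `await!(expr)` migrates to the postfix `.await` operator that replaced it; a minimal sketch (this needs an async-capable toolchain and an executor to actually drive the futures, neither of which is shown or part of this diff):

```rust
async fn fetch() -> u32 {
    42
}

async fn caller() -> u32 {
    // Previously written as: await!(fetch())
    fetch().await
}
```
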
@@ -554,7 +554,10 @@ fn inner_parse_loop<'root, 'tt>(
         match item.top_elts.get_tt(idx) {
             // Need to descend into a sequence
             TokenTree::Sequence(sp, seq) => {
-                // Examine the case where there are 0 matches of this sequence
+                // Examine the case where there are 0 matches of this sequence. We are
+                // implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
+                // result in a "no rules expected token" error by virtue of this matcher not
+                // working.
                 if seq.op == quoted::KleeneOp::ZeroOrMore
                     || seq.op == quoted::KleeneOp::ZeroOrOne
                 {
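The expanded comment above spells out why a `+` (OneOrMore) repetition is never given a zero-match fallback. A tiny illustration of the resulting behavior, written as an ordinary macro_rules example rather than code from this diff:

```rust
macro_rules! sum_at_least_one {
    // `+` requires one or more repetitions of `$x`.
    ($($x:expr),+) => { 0 $(+ $x)* };
}

fn main() {
    assert_eq!(sum_at_least_one!(1, 2, 3), 6);
    // sum_at_least_one!(); // error: unexpected end of macro invocation
}
```
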
@@ -151,7 +151,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
 
             let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
             // rhs has holes ( `$id` and `$(...)` that need filled)
-            let mut tts = transcribe(cx, Some(named_matches), rhs);
+            let mut tts = transcribe(cx, &named_matches, rhs);
 
             // Replace all the tokens for the corresponding positions in the macro, to maintain
             // proper positions in error reporting, while maintaining the macro_backtrace.
@@ -73,6 +73,7 @@ pub enum KleeneOp {
     ZeroOrMore,
     /// Kleene plus (`+`) for one or more repetitions
    OneOrMore,
+    /// Kleene optional (`?`) for zero or one repetitions
     ZeroOrOne,
 }
 
@ -1,10 +1,10 @@
|
||||
use crate::ast::Ident;
|
||||
use crate::ext::base::ExtCtxt;
|
||||
use crate::ext::expand::Marker;
|
||||
use crate::ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
|
||||
use crate::ext::tt::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
|
||||
use crate::ext::tt::quoted;
|
||||
use crate::mut_visit::noop_visit_tt;
|
||||
use crate::parse::token::{self, Token, NtTT};
|
||||
use crate::parse::token::{self, NtTT, Token};
|
||||
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
|
||||
|
||||
use smallvec::{smallvec, SmallVec};
|
||||
@ -13,24 +13,16 @@ use syntax_pos::DUMMY_SP;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use std::mem;
|
||||
use std::ops::Add;
|
||||
use std::rc::Rc;
|
||||
|
||||
// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
|
||||
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
|
||||
enum Frame {
|
||||
Delimited {
|
||||
forest: Lrc<quoted::Delimited>,
|
||||
idx: usize,
|
||||
span: DelimSpan,
|
||||
},
|
||||
Sequence {
|
||||
forest: Lrc<quoted::SequenceRepetition>,
|
||||
idx: usize,
|
||||
sep: Option<Token>,
|
||||
},
|
||||
Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
|
||||
Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
|
||||
}
|
||||
|
||||
impl Frame {
|
||||
/// Construct a new frame around the delimited set of tokens.
|
||||
fn new(tts: Vec<quoted::TokenTree>) -> Frame {
|
||||
let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
|
||||
Frame::Delimited { forest: forest, idx: 0, span: DelimSpan::dummy() }
|
||||
@ -54,84 +46,161 @@ impl Iterator for Frame {
|
||||
}
|
||||
}
|
||||
|
||||
/// This can do Macro-By-Example transcription. On the other hand, if
|
||||
/// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can
|
||||
/// (and should) be None.
|
||||
pub fn transcribe(cx: &ExtCtxt<'_>,
|
||||
interp: Option<FxHashMap<Ident, Rc<NamedMatch>>>,
|
||||
src: Vec<quoted::TokenTree>)
|
||||
-> TokenStream {
|
||||
/// This can do Macro-By-Example transcription.
|
||||
/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
|
||||
/// invocation. We are assuming we already know there is a match.
|
||||
/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
|
||||
///
|
||||
/// For example,
|
||||
///
|
||||
/// ```rust
|
||||
/// macro_rules! foo {
|
||||
/// ($id:ident) => { println!("{}", stringify!($id)); }
|
||||
/// }
|
||||
///
|
||||
/// foo!(bar);
|
||||
/// ```
|
||||
///
|
||||
/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
|
||||
///
|
||||
/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
|
||||
///
|
||||
/// Along the way, we do some additional error checking.
|
||||
pub fn transcribe(
|
||||
cx: &ExtCtxt<'_>,
|
||||
interp: &FxHashMap<Ident, Rc<NamedMatch>>,
|
||||
src: Vec<quoted::TokenTree>,
|
||||
) -> TokenStream {
|
||||
// Nothing for us to transcribe...
|
||||
if src.is_empty() {
|
||||
return TokenStream::empty();
|
||||
}
|
||||
|
||||
// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
|
||||
// we have yet to expand/are still expanding. We start the stack off with the whole RHS.
|
||||
let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
|
||||
let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
|
||||
|
||||
// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
|
||||
// `repeats` keeps track of where we are in matching at each level, with the last element being
|
||||
// the most deeply nested sequence. This is used as a stack.
|
||||
let mut repeats = Vec::new();
|
||||
|
||||
// `result` contains resulting token stream from the TokenTree we just finished processing. At
|
||||
// the end, this will contain the full result of transcription, but at arbitrary points during
|
||||
// `transcribe`, `result` will contain subsets of the final result.
|
||||
//
|
||||
// Specifically, as we descend into each TokenTree, we will push the existing results onto the
|
||||
// `result_stack` and clear `results`. We will then produce the results of transcribing the
|
||||
// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
|
||||
// `result_stack` and append `results` too it to produce the new `results` up to that point.
|
||||
//
|
||||
// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
|
||||
// again, and we are done transcribing.
|
||||
let mut result: Vec<TreeAndJoint> = Vec::new();
|
||||
let mut result_stack = Vec::new();
|
||||
|
||||
loop {
|
||||
// Look at the last frame on the stack.
|
||||
let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
|
||||
// If it still has a TokenTree we have not looked at yet, use that tree.
|
||||
tree
|
||||
} else {
|
||||
}
|
||||
// The else-case never produces a value for `tree` (it `continue`s or `return`s).
|
||||
else {
|
||||
// Otherwise, if we have just reached the end of a sequence and we can keep repeating,
|
||||
// go back to the beginning of the sequence.
|
||||
if let Frame::Sequence { ref mut idx, ref sep, .. } = *stack.last_mut().unwrap() {
|
||||
let (ref mut repeat_idx, repeat_len) = *repeats.last_mut().unwrap();
|
||||
*repeat_idx += 1;
|
||||
if *repeat_idx < repeat_len {
|
||||
*idx = 0;
|
||||
if let Some(sep) = sep.clone() {
|
||||
// repeat same span, I guess
|
||||
let prev_span = match result.last() {
|
||||
Some((tt, _)) => tt.span(),
|
||||
None => DUMMY_SP,
|
||||
};
|
||||
result.push(TokenTree::Token(prev_span, sep).into());
|
||||
}
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// We are done with the top of the stack. Pop it. Depending on what it was, we do
|
||||
// different things. Note that the outermost item must be the delimited, wrapped RHS
|
||||
// that was passed in originally to `transcribe`.
|
||||
match stack.pop().unwrap() {
|
||||
// Done with a sequence. Pop from repeats.
|
||||
Frame::Sequence { .. } => {
|
||||
repeats.pop();
|
||||
}
|
||||
|
||||
// We are done processing a Delimited. If this is the top-level delimited, we are
|
||||
// done. Otherwise, we unwind the result_stack to append what we have produced to
|
||||
// any previous results.
|
||||
Frame::Delimited { forest, span, .. } => {
|
||||
if result_stack.is_empty() {
|
||||
// No results left to compute! We are back at the top-level.
|
||||
return TokenStream::new(result);
|
||||
}
|
||||
let tree = TokenTree::Delimited(
|
||||
span,
|
||||
forest.delim,
|
||||
TokenStream::new(result).into(),
|
||||
);
|
||||
|
||||
// Step back into the parent Delimited.
|
||||
let tree =
|
||||
TokenTree::Delimited(span, forest.delim, TokenStream::new(result).into());
|
||||
result = result_stack.pop().unwrap();
|
||||
result.push(tree.into());
|
||||
}
|
||||
}
|
||||
continue
|
||||
continue;
|
||||
};
|
||||
|
||||
// At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
|
||||
// `tree` contains the next `TokenTree` to be processed.
|
||||
match tree {
|
||||
quoted::TokenTree::Sequence(sp, seq) => {
|
||||
// FIXME(pcwalton): Bad copy.
|
||||
match lockstep_iter_size("ed::TokenTree::Sequence(sp, seq.clone()),
|
||||
&interpolations,
|
||||
&repeats) {
|
||||
// We are descending into a sequence. We first make sure that the matchers in the RHS
|
||||
// and the matches in `interp` have the same shape. Otherwise, either the caller or the
|
||||
// macro writer has made a mistake.
|
||||
seq @ quoted::TokenTree::Sequence(..) => {
|
||||
match lockstep_iter_size(&seq, interp, &repeats) {
|
||||
LockstepIterSize::Unconstrained => {
|
||||
cx.span_fatal(sp.entire(), /* blame macro writer */
|
||||
"attempted to repeat an expression \
|
||||
containing no syntax \
|
||||
variables matched as repeating at this depth");
|
||||
cx.span_fatal(
|
||||
seq.span(), /* blame macro writer */
|
||||
"attempted to repeat an expression containing no syntax variables \
|
||||
matched as repeating at this depth",
|
||||
);
|
||||
}
|
||||
|
||||
LockstepIterSize::Contradiction(ref msg) => {
|
||||
// FIXME: this should be impossible. I (mark-i-m) believe it would
|
||||
// represent a bug in the macro_parser.
|
||||
// FIXME #2887 blame macro invoker instead
|
||||
cx.span_fatal(sp.entire(), &msg[..]);
|
||||
cx.span_fatal(seq.span(), &msg[..]);
|
||||
}
|
||||
|
||||
LockstepIterSize::Constraint(len, _) => {
|
||||
// We do this to avoid an extra clone above. We know that this is a
|
||||
// sequence already.
|
||||
let (sp, seq) = if let quoted::TokenTree::Sequence(sp, seq) = seq {
|
||||
(sp, seq)
|
||||
} else {
|
||||
unreachable!()
|
||||
};
|
||||
|
||||
// Is the repetition empty?
|
||||
if len == 0 {
|
||||
if seq.op == quoted::KleeneOp::OneOrMore {
|
||||
// FIXME: this should be impossible because we check for this in
|
||||
// macro_parser.rs
|
||||
// FIXME #2887 blame invoker
|
||||
cx.span_fatal(sp.entire(), "this must repeat at least once");
|
||||
}
|
||||
} else {
|
||||
// 0 is the initial counter (we have done 0 repetitions so far). `len`
// is the total number of repetitions we should generate.
|
||||
repeats.push((0, len));
|
||||
|
||||
// The first time we encounter the sequence we push it to the stack. It
|
||||
// then gets reused (see the beginning of the loop) until we are done
|
||||
// repeating.
|
||||
stack.push(Frame::Sequence {
|
||||
idx: 0,
|
||||
sep: seq.separator.clone(),
|
||||
@ -141,10 +210,16 @@ pub fn transcribe(cx: &ExtCtxt<'_>,
|
||||
}
|
||||
}
|
||||
}
|
||||
// FIXME #2887: think about span stuff here
|
||||
|
||||
// Replace the meta-var with the matched token tree from the invocation.
|
||||
quoted::TokenTree::MetaVar(mut sp, ident) => {
|
||||
if let Some(cur_matched) = lookup_cur_matched(ident, &interpolations, &repeats) {
|
||||
// Find the matched nonterminal from the macro invocation, and use it to replace
|
||||
// the meta-var.
|
||||
if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
|
||||
if let MatchedNonterminal(ref nt) = *cur_matched {
|
||||
// FIXME #2887: why do we apply a mark when matching a token tree meta-var
|
||||
// (e.g. `$x:tt`), but not when we are matching any other type of token
|
||||
// tree?
|
||||
if let NtTT(ref tt) = **nt {
|
||||
result.push(tt.clone().into());
|
||||
} else {
|
||||
@ -153,10 +228,15 @@ pub fn transcribe(cx: &ExtCtxt<'_>,
|
||||
result.push(token.into());
|
||||
}
|
||||
} else {
|
||||
cx.span_fatal(sp, /* blame the macro writer */
|
||||
&format!("variable '{}' is still repeating at this depth", ident));
|
||||
// We were unable to descend far enough. This is an error.
|
||||
cx.span_fatal(
|
||||
sp, /* blame the macro writer */
|
||||
&format!("variable '{}' is still repeating at this depth", ident),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// If we aren't able to match the meta-var, we push it back into the result but
|
||||
// with modified syntax context. (I believe this supports nested macros).
|
||||
let ident =
|
||||
Ident::new(ident.name, ident.span.apply_mark(cx.current_expansion.mark));
|
||||
sp = sp.apply_mark(cx.current_expansion.mark);
|
||||
@ -164,26 +244,44 @@ pub fn transcribe(cx: &ExtCtxt<'_>,
|
||||
result.push(TokenTree::Token(sp, token::Token::from_ast_ident(ident)).into());
|
||||
}
|
||||
}
|
||||
|
||||
// If we are entering a new delimiter, we push its contents to the `stack` to be
|
||||
// processed, and we push all of the currently produced results to the `result_stack`.
|
||||
// We will produce all of the results of the inside of the `Delimited` and then we will
|
||||
// jump back out of the Delimited, pop the result_stack and add the new results back to
|
||||
// the previous results (from outside the Delimited).
|
||||
quoted::TokenTree::Delimited(mut span, delimited) => {
|
||||
span = span.apply_mark(cx.current_expansion.mark);
|
||||
stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
|
||||
result_stack.push(mem::replace(&mut result, Vec::new()));
|
||||
}
|
||||
|
||||
// Nothing much to do here. Just push the token to the result, being careful to
|
||||
// preserve syntax context.
|
||||
quoted::TokenTree::Token(sp, tok) => {
|
||||
let mut marker = Marker(cx.current_expansion.mark);
|
||||
let mut tt = TokenTree::Token(sp, tok);
|
||||
noop_visit_tt(&mut tt, &mut marker);
|
||||
result.push(tt.into());
|
||||
}
|
||||
|
||||
// There should be no meta-var declarations in the invocation of a macro.
|
||||
quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
|
||||
}
|
||||
}
|
||||
}
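To make the sequence and `repeats` bookkeeping in `transcribe` concrete, here is a minimal, self-contained macro_rules sketch (not part of this change; the macro name is made up): a `$(...),*` sequence in the RHS is transcribed once per matched repetition, with the separator token emitted between iterations, which is exactly what the `Frame::Sequence` counter does above.

// Illustration only: `$x` matches three expressions, so the RHS sequence is transcribed
// three times and the `,` separator is inserted between the iterations.
macro_rules! doubled {
    ($($x:expr),*) => {
        vec![$($x * 2),*]
    };
}

fn main() {
    // Expands to `vec![1 * 2, 2 * 2, 3 * 2]`.
    assert_eq!(doubled!(1, 2, 3), vec![2, 4, 6]);
}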
|
||||
|
||||
fn lookup_cur_matched(ident: Ident,
|
||||
interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
|
||||
repeats: &[(usize, usize)])
|
||||
-> Option<Rc<NamedMatch>> {
|
||||
/// Lookup the meta-var named `ident` and return the matched token tree from the invocation using
|
||||
/// the set of matches `interpolations`.
|
||||
///
|
||||
/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
|
||||
/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
|
||||
/// made a mistake, and we return `None`.
|
||||
fn lookup_cur_matched(
|
||||
ident: Ident,
|
||||
interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
|
||||
repeats: &[(usize, usize)],
|
||||
) -> Option<Rc<NamedMatch>> {
|
||||
interpolations.get(&ident).map(|matched| {
|
||||
let mut matched = matched.clone();
|
||||
for &(idx, _) in repeats {
|
||||
@ -198,17 +296,30 @@ fn lookup_cur_matched(ident: Ident,
|
||||
})
|
||||
}
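As a hedged illustration of the `repeats`-driven descent that `lookup_cur_matched` performs (again not from the diff; the macro name is made up): a meta-var nested under two repetitions is only reachable after the transcriber has pushed one `(idx, len)` entry per nesting level, and using it at too shallow a depth is the "still repeating at this depth" error reported earlier in `transcribe`.

// Illustration only: `$x` sits two repetitions deep, so looking it up descends through two
// `repeats` entries (outer `;`-separated groups, inner `,`-separated elements).
macro_rules! grid {
    ($($($x:expr),*);*) => {
        [ $( [ $( $x ),* ] ),* ]
    };
}

fn main() {
    assert_eq!(grid!(1, 2; 3, 4), [[1, 2], [3, 4]]);
}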
|
||||
|
||||
/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
|
||||
/// sure that the size of each sequence and all of its nested sequences are the same as the sizes
|
||||
/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
|
||||
/// has made a mistake (either the macro writer or caller).
|
||||
#[derive(Clone)]
|
||||
enum LockstepIterSize {
|
||||
/// No constraints on length of matcher. This is true for any TokenTree variants except a
|
||||
/// `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedNonterminal`).
|
||||
Unconstrained,
|
||||
|
||||
/// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
|
||||
/// meta-var are returned.
|
||||
Constraint(usize, Ident),
|
||||
|
||||
/// Two `Constraint`s on the same sequence had different lengths. This is an error.
|
||||
Contradiction(String),
|
||||
}
|
||||
|
||||
impl Add for LockstepIterSize {
|
||||
type Output = LockstepIterSize;
|
||||
|
||||
fn add(self, other: LockstepIterSize) -> LockstepIterSize {
|
||||
impl LockstepIterSize {
|
||||
/// Find incompatibilities in matcher/invocation sizes.
|
||||
/// - `Unconstrained` is compatible with everything.
|
||||
/// - `Contradiction` is incompatible with everything.
|
||||
/// - `Constraint(len)` is only compatible with other constraints of the same length.
|
||||
fn with(self, other: LockstepIterSize) -> LockstepIterSize {
|
||||
match self {
|
||||
LockstepIterSize::Unconstrained => other,
|
||||
LockstepIterSize::Contradiction(_) => self,
|
||||
@ -217,9 +328,11 @@ impl Add for LockstepIterSize {
|
||||
LockstepIterSize::Contradiction(_) => other,
|
||||
LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
|
||||
LockstepIterSize::Constraint(r_len, r_id) => {
|
||||
let msg = format!("inconsistent lockstep iteration: \
|
||||
'{}' has {} items, but '{}' has {}",
|
||||
l_id, l_len, r_id, r_len);
|
||||
let msg = format!(
|
||||
"inconsistent lockstep iteration: \
|
||||
'{}' has {} items, but '{}' has {}",
|
||||
l_id, l_len, r_id, r_len
|
||||
);
|
||||
LockstepIterSize::Contradiction(msg)
|
||||
}
|
||||
},
|
||||
@ -227,30 +340,38 @@ impl Add for LockstepIterSize {
|
||||
}
|
||||
}
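A small usage-level sketch of what the lockstep constraint means (assumed example, not from this PR): two independently matched sequences may be zipped inside one RHS repetition only if their lengths agree; otherwise expansion fails with the "inconsistent lockstep iteration" message built above.

// Illustration only: `$a` and `$b` come from separate matcher sequences, so transcribing
// `$(($a, $b)),*` requires their lengths to agree (two equal `Constraint`s).
macro_rules! zip_pairs {
    ($($a:expr),*; $($b:expr),*) => {
        [$(($a, $b)),*]
    };
}

fn main() {
    assert_eq!(zip_pairs!(1, 2; "x", "y"), [(1, "x"), (2, "y")]);
    // `zip_pairs!(1, 2; "x")` would not expand: 'a' has 2 items, but 'b' has 1.
}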
|
||||
|
||||
fn lockstep_iter_size(tree: &quoted::TokenTree,
|
||||
interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
|
||||
repeats: &[(usize, usize)])
|
||||
-> LockstepIterSize {
|
||||
/// Given a `tree`, make sure that all sequences have the same length as the matches for the
|
||||
/// appropriate meta-vars in `interpolations`.
|
||||
///
|
||||
/// Note that if `repeats` does not match the exact correct depth of a meta-var,
|
||||
/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
|
||||
/// multiple nested matcher sequences.
|
||||
fn lockstep_iter_size(
|
||||
tree: &quoted::TokenTree,
|
||||
interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
|
||||
repeats: &[(usize, usize)],
|
||||
) -> LockstepIterSize {
|
||||
use quoted::TokenTree;
|
||||
match *tree {
|
||||
TokenTree::Delimited(_, ref delimed) => {
|
||||
delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
|
||||
size + lockstep_iter_size(tt, interpolations, repeats)
|
||||
size.with(lockstep_iter_size(tt, interpolations, repeats))
|
||||
})
|
||||
},
|
||||
}
|
||||
TokenTree::Sequence(_, ref seq) => {
|
||||
seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
|
||||
size + lockstep_iter_size(tt, interpolations, repeats)
|
||||
size.with(lockstep_iter_size(tt, interpolations, repeats))
|
||||
})
|
||||
},
|
||||
TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) =>
|
||||
}
|
||||
TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
|
||||
match lookup_cur_matched(name, interpolations, repeats) {
|
||||
Some(matched) => match *matched {
|
||||
MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
|
||||
MatchedSeq(ref ads, _) => LockstepIterSize::Constraint(ads.len(), name),
|
||||
},
|
||||
_ => LockstepIterSize::Unconstrained
|
||||
},
|
||||
_ => LockstepIterSize::Unconstrained,
|
||||
}
|
||||
}
|
||||
TokenTree::Token(..) => LockstepIterSize::Unconstrained,
|
||||
}
|
||||
}
|
||||
|
@ -1598,7 +1598,7 @@ mod tests {
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
use syntax_pos::{BytePos, Span, NO_EXPANSION};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||
use rustc_data_structures::sync::Lock;
|
||||
|
||||
fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
|
||||
@ -1617,6 +1617,7 @@ mod tests {
|
||||
raw_identifier_spans: Lock::new(Vec::new()),
|
||||
registered_diagnostics: Lock::new(ErrorMap::new()),
|
||||
buffered_lints: Lock::new(vec![]),
|
||||
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -11,12 +11,12 @@ use crate::tokenstream::{TokenStream, TokenTree};
|
||||
use crate::diagnostics::plugin::ErrorMap;
|
||||
use crate::print::pprust::token_to_string;
|
||||
|
||||
use errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
|
||||
use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
|
||||
use rustc_data_structures::sync::{Lrc, Lock};
|
||||
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
|
||||
use log::debug;
|
||||
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||
use std::borrow::Cow;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str;
|
||||
@ -52,6 +52,10 @@ pub struct ParseSess {
|
||||
included_mod_stack: Lock<Vec<PathBuf>>,
|
||||
source_map: Lrc<SourceMap>,
|
||||
pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
|
||||
/// Contains the spans of block expressions that could have been incomplete based on the
|
||||
/// operation token that followed it, but that the parser cannot identify without further
|
||||
/// analysis.
|
||||
pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
|
||||
}
|
||||
|
||||
impl ParseSess {
|
||||
@ -75,6 +79,7 @@ impl ParseSess {
|
||||
included_mod_stack: Lock::new(vec![]),
|
||||
source_map,
|
||||
buffered_lints: Lock::new(vec![]),
|
||||
ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -98,6 +103,24 @@ impl ParseSess {
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/// Extend an error with a suggestion to wrap an expression with parentheses to allow the
|
||||
/// parser to continue parsing the following operation as part of the same expression.
|
||||
pub fn expr_parentheses_needed(
|
||||
&self,
|
||||
err: &mut DiagnosticBuilder<'_>,
|
||||
span: Span,
|
||||
alt_snippet: Option<String>,
|
||||
) {
|
||||
if let Some(snippet) = self.source_map().span_to_snippet(span).ok().or(alt_snippet) {
|
||||
err.span_suggestion(
|
||||
span,
|
||||
"parentheses are required to parse this as an expression",
|
||||
format!("({})", snippet),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
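As a user-level sketch of the situation `expr_parentheses_needed` addresses (assumed example, not taken from the diff): a block followed by a binary operator parses as a finished statement plus a new expression, so the operator is reported as "expected expression" and the machine-applicable suggestion wraps the block in parentheses.

// Illustration only: without the parentheses, `{ 2 } + 2` is parsed as the statement
// `{ 2 }` followed by `+ 2`, and `+` cannot start an expression.
fn two_plus_two() -> i32 {
    ({ 2 }) + 2 // the suggested, machine-applicable fix
}

fn main() {
    assert_eq!(two_plus_two(), 4);
}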
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -50,7 +50,10 @@ use crate::symbol::{Symbol, keywords};
|
||||
|
||||
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
|
||||
use rustc_target::spec::abi::{self, Abi};
|
||||
use syntax_pos::{Span, MultiSpan, BytePos, FileName};
|
||||
use syntax_pos::{
|
||||
Span, MultiSpan, BytePos, FileName,
|
||||
hygiene::CompilerDesugaringKind,
|
||||
};
|
||||
use log::{debug, trace};
|
||||
|
||||
use std::borrow::Cow;
|
||||
@ -186,6 +189,7 @@ enum PrevTokenKind {
|
||||
Interpolated,
|
||||
Eof,
|
||||
Ident,
|
||||
BitOr,
|
||||
Other,
|
||||
}
|
||||
|
||||
@ -1375,6 +1379,7 @@ impl<'a> Parser<'a> {
|
||||
token::DocComment(..) => PrevTokenKind::DocComment,
|
||||
token::Comma => PrevTokenKind::Comma,
|
||||
token::BinOp(token::Plus) => PrevTokenKind::Plus,
|
||||
token::BinOp(token::Or) => PrevTokenKind::BitOr,
|
||||
token::Interpolated(..) => PrevTokenKind::Interpolated,
|
||||
token::Eof => PrevTokenKind::Eof,
|
||||
token::Ident(..) => PrevTokenKind::Ident,
|
||||
@ -2806,6 +2811,12 @@ impl<'a> Parser<'a> {
|
||||
let msg = format!("expected expression, found {}",
|
||||
self.this_token_descr());
|
||||
let mut err = self.fatal(&msg);
|
||||
let sp = self.sess.source_map().start_point(self.span);
|
||||
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow()
|
||||
.get(&sp)
|
||||
{
|
||||
self.sess.expr_parentheses_needed(&mut err, *sp, None);
|
||||
}
|
||||
err.span_label(self.span, "expected expression");
|
||||
return Err(err);
|
||||
}
|
||||
@ -2845,7 +2856,7 @@ impl<'a> Parser<'a> {
|
||||
"struct literals are not allowed here",
|
||||
);
|
||||
err.multipart_suggestion(
|
||||
"surround the struct literal with parenthesis",
|
||||
"surround the struct literal with parentheses",
|
||||
vec![
|
||||
(lo.shrink_to_lo(), "(".to_string()),
|
||||
(expr.span.shrink_to_hi(), ")".to_string()),
|
||||
@ -3506,9 +3517,42 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
};
|
||||
|
||||
if self.expr_is_complete(&lhs) {
|
||||
// Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
|
||||
return Ok(lhs);
|
||||
match (self.expr_is_complete(&lhs), AssocOp::from_token(&self.token)) {
|
||||
(true, None) => {
|
||||
// Semi-statement forms are odd. See https://github.com/rust-lang/rust/issues/29071
|
||||
return Ok(lhs);
|
||||
}
|
||||
(false, _) => {} // continue parsing the expression
|
||||
// An exhaustive check is done in the following block, but these are checked first
|
||||
// because they *are* ambiguous but also reasonable looking incorrect syntax, so we
|
||||
// want to keep their span info to improve diagnostics in these cases in a later stage.
|
||||
(true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3`
|
||||
(true, Some(AssocOp::Subtract)) | // `{ 42 } -5`
|
||||
(true, Some(AssocOp::Add)) => { // `{ 42 } + 42
|
||||
// These cases are ambiguous and can't be identified in the parser alone
|
||||
let sp = self.sess.source_map().start_point(self.span);
|
||||
self.sess.ambiguous_block_expr_parse.borrow_mut().insert(sp, lhs.span);
|
||||
return Ok(lhs);
|
||||
}
|
||||
(true, Some(ref op)) if !op.can_continue_expr_unambiguously() => {
|
||||
return Ok(lhs);
|
||||
}
|
||||
(true, Some(_)) => {
|
||||
// We've found an expression that would be parsed as a statement, but the next
|
||||
// token implies this should be parsed as an expression.
|
||||
// For example: `if let Some(x) = x { x } else { 0 } / 2`
|
||||
let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &format!(
|
||||
"expected expression, found `{}`",
|
||||
pprust::token_to_string(&self.token),
|
||||
));
|
||||
err.span_label(self.span, "expected expression");
|
||||
self.sess.expr_parentheses_needed(
|
||||
&mut err,
|
||||
lhs.span,
|
||||
Some(pprust::expr_to_string(&lhs),
|
||||
));
|
||||
err.emit();
|
||||
}
|
||||
}
|
||||
self.expected_tokens.push(TokenType::Operator);
|
||||
while let Some(op) = AssocOp::from_token(&self.token) {
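For the three ambiguous operators singled out above, a hedged usage-level example (not part of the change): `{ 3 } * 3` is grammatically valid as the statement `{ 3 }` followed by the dereference `*3`, so the parser accepts it, records the block's span in `ambiguous_block_expr_parse`, and only the later type error suggests the parenthesized reading.

// Illustration only: as written the expression is unambiguous; dropping the parentheses
// turns it into `{ 3 }` (a statement) followed by `*3` (a dereference of an integer),
// which fails with "type `{integer}` cannot be dereferenced".
fn nine() -> i32 {
    ({ 3 }) * 3
}

fn main() {
    assert_eq!(nine(), 9);
}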
|
||||
@ -4819,6 +4863,10 @@ impl<'a> Parser<'a> {
|
||||
);
|
||||
let mut err = self.fatal(&msg);
|
||||
err.span_label(self.span, format!("expected {}", expected));
|
||||
let sp = self.sess.source_map().start_point(self.span);
|
||||
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
|
||||
self.sess.expr_parentheses_needed(&mut err, *sp, None);
|
||||
}
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
@ -8727,6 +8775,10 @@ impl<'a> Parser<'a> {
|
||||
/// The arguments of the function are replaced in HIR lowering with the arguments created by
|
||||
/// this function and the statements created here are inserted at the top of the closure body.
|
||||
fn construct_async_arguments(&mut self, asyncness: &mut Spanned<IsAsync>, decl: &mut FnDecl) {
|
||||
// FIXME(davidtwco): This function should really live in the HIR lowering but because
|
||||
// the types constructed here need to be used in parts of resolve so that the correct
|
||||
// locals are considered upvars, it is currently easier for it to live here in the parser,
|
||||
// where it can be constructed once.
|
||||
if let IsAsync::Async { ref mut arguments, .. } = asyncness.node {
|
||||
for (index, input) in decl.inputs.iter_mut().enumerate() {
|
||||
let id = ast::DUMMY_NODE_ID;
|
||||
@ -8741,6 +8793,15 @@ impl<'a> Parser<'a> {
|
||||
// statement.
|
||||
let (binding_mode, ident, is_simple_pattern) = match input.pat.node {
|
||||
PatKind::Ident(binding_mode @ BindingMode::ByValue(_), ident, _) => {
|
||||
// Simple patterns like this don't have a generated argument, but they are
|
||||
// moved into the closure with a statement, so any `mut` bindings on the
|
||||
// argument will be unused. This binding mode can't be removed, because
|
||||
// this would affect the input to procedural macros, but they can have
|
||||
// their span marked as being the result of a compiler desugaring so
|
||||
// that they aren't linted against.
|
||||
input.pat.span = self.sess.source_map().mark_span_with_reason(
|
||||
CompilerDesugaringKind::Async, span, None);
|
||||
|
||||
(binding_mode, ident, true)
|
||||
}
|
||||
_ => (BindingMode::ByValue(Mutability::Mutable), ident, false),
|
||||
@ -8810,15 +8871,6 @@ impl<'a> Parser<'a> {
|
||||
})
|
||||
};
|
||||
|
||||
// Remove mutability from arguments. If this is not a simple pattern,
|
||||
// those arguments are replaced by `__argN`, so there is no need to do this.
|
||||
if let PatKind::Ident(BindingMode::ByValue(mutability @ Mutability::Mutable), ..) =
|
||||
&mut input.pat.node
|
||||
{
|
||||
assert!(is_simple_pattern);
|
||||
*mutability = Mutability::Immutable;
|
||||
}
|
||||
|
||||
let move_stmt = Stmt { id, node: StmtKind::Local(P(move_local)), span };
|
||||
arguments.push(AsyncArgument { ident, arg, pat_stmt, move_stmt });
|
||||
}
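A rough, hand-written approximation (assumed and simplified; the real lowering is done by the compiler) of what `construct_async_arguments` arranges for `async fn f(mut x: u8)`: the argument keeps its original `mut` pattern so procedural macros see the unmodified tokens, while the body re-binds it inside the async block so the closure owns it.

use std::future::Future;

// Illustration only: a manual stand-in for the generated statements. The outer `x` stays
// immutable (mirroring the `*mutability = Mutability::Immutable` step), and the re-binding
// inside the async block plays the role of the generated `move_stmt`/`pat_stmt`.
fn f(x: u8) -> impl Future<Output = u8> {
    async move {
        let mut x = x;
        x += 1;
        x
    }
}

fn main() {
    // Driving the future needs an executor; `f(1)` would resolve to 2.
    let _fut = f(1);
}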
|
||||
|
@ -930,6 +930,27 @@ impl SourceMap {
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Reuses the span but adds information like the kind of the desugaring and features that are
|
||||
/// allowed inside this span.
|
||||
pub fn mark_span_with_reason(
|
||||
&self,
|
||||
reason: hygiene::CompilerDesugaringKind,
|
||||
span: Span,
|
||||
allow_internal_unstable: Option<Lrc<[symbol::Symbol]>>,
|
||||
) -> Span {
|
||||
let mark = Mark::fresh(Mark::root());
|
||||
mark.set_expn_info(ExpnInfo {
|
||||
call_site: span,
|
||||
def_site: Some(span),
|
||||
format: CompilerDesugaring(reason),
|
||||
allow_internal_unstable,
|
||||
allow_internal_unsafe: false,
|
||||
local_inner_macros: false,
|
||||
edition: hygiene::default_edition(),
|
||||
});
|
||||
span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
|
||||
}
|
||||
}
|
||||
|
||||
impl SourceMapper for SourceMap {
|
||||
|
@ -207,6 +207,31 @@ impl AssocOp {
|
||||
ObsoleteInPlace | Assign | AssignOp(_) | As | DotDot | DotDotEq | Colon => None
|
||||
}
|
||||
}
|
||||
|
||||
/// This operator could be used to follow a block unambiguously.
|
||||
///
|
||||
/// This is used for error recovery at the moment, providing a suggestion to wrap blocks with
|
||||
/// parentheses while having a high degree of confidence on the correctness of the suggestion.
|
||||
pub fn can_continue_expr_unambiguously(&self) -> bool {
|
||||
use AssocOp::*;
|
||||
match self {
|
||||
BitXor | // `{ 42 } ^ 3`
|
||||
Assign | // `{ 42 } = { 42 }`
|
||||
Divide | // `{ 42 } / 42`
|
||||
Modulus | // `{ 42 } % 2`
|
||||
ShiftRight | // `{ 42 } >> 2`
|
||||
LessEqual | // `{ 42 } <= 3`
|
||||
Greater | // `{ 42 } > 3`
|
||||
GreaterEqual | // `{ 42 } >= 3`
|
||||
AssignOp(_) | // `{ 42 } +=`
|
||||
LAnd | // `{ 42 } &&foo`
|
||||
As | // `{ 42 } as usize`
|
||||
// Equal | // `{ 42 } == { 42 }` Accepting these here would regress incorrect
|
||||
// NotEqual | // `{ 42 } != { 42 } struct literals parser recovery.
|
||||
Colon => true, // `{ 42 }: usize`
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
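A small sketch (assumed example) of why the operators listed above are safe to act on: `^` cannot begin an expression, so a block followed by `^ x` can only be an incomplete binary expression, unlike `*`, `-`, or `+`, which also read as the start of a fresh unary expression after a block statement.

// Illustration only: `{ 42 } ^ x` can only mean XOR (there is no prefix `^`), so the parser
// can confidently suggest `({ 42 }) ^ x`; by contrast `{ 42 } - x` could also be the
// statement `{ 42 }` followed by the negation `-x`.
fn masked(x: i32) -> i32 {
    ({ 42 }) ^ x
}

fn main() {
    assert_eq!(masked(0), 42);
}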
|
||||
|
||||
pub const PREC_RESET: i8 = -100;
|
||||
|
src/test/mir-opt/slice-drop-shim.rs (new file, 88 lines)
@ -0,0 +1,88 @@
|
||||
fn main() {
|
||||
std::ptr::drop_in_place::<[String]> as unsafe fn(_);
|
||||
}
|
||||
|
||||
// END RUST SOURCE
|
||||
|
||||
// START rustc.ptr-real_drop_in_place.[std__string__String].AddMovesForPackedDrops.before.mir
|
||||
// let mut _2: usize;
|
||||
// let mut _3: usize;
|
||||
// let mut _4: usize;
|
||||
// let mut _5: &mut std::string::String;
|
||||
// let mut _6: bool;
|
||||
// let mut _7: &mut std::string::String;
|
||||
// let mut _8: bool;
|
||||
// let mut _9: *mut std::string::String;
|
||||
// let mut _10: *mut std::string::String;
|
||||
// let mut _11: &mut std::string::String;
|
||||
// let mut _12: bool;
|
||||
// let mut _13: &mut std::string::String;
|
||||
// let mut _14: bool;
|
||||
// let mut _15: *mut [std::string::String];
|
||||
// bb0: {
|
||||
// goto -> bb15;
|
||||
// }
|
||||
// bb1: {
|
||||
// return;
|
||||
// }
|
||||
// bb2 (cleanup): {
|
||||
// resume;
|
||||
// }
|
||||
// bb3 (cleanup): {
|
||||
// _5 = &mut (*_1)[_4];
|
||||
// _4 = Add(move _4, const 1usize);
|
||||
// drop((*_5)) -> bb4;
|
||||
// }
|
||||
// bb4 (cleanup): {
|
||||
// _6 = Eq(_4, _3);
|
||||
// switchInt(move _6) -> [false: bb3, otherwise: bb2];
|
||||
// }
|
||||
// bb5: {
|
||||
// _7 = &mut (*_1)[_4];
|
||||
// _4 = Add(move _4, const 1usize);
|
||||
// drop((*_7)) -> [return: bb6, unwind: bb4];
|
||||
// }
|
||||
// bb6: {
|
||||
// _8 = Eq(_4, _3);
|
||||
// switchInt(move _8) -> [false: bb5, otherwise: bb1];
|
||||
// }
|
||||
// bb7: {
|
||||
// _4 = const 0usize;
|
||||
// goto -> bb6;
|
||||
// }
|
||||
// bb8: {
|
||||
// goto -> bb7;
|
||||
// }
|
||||
// bb9 (cleanup): {
|
||||
// _11 = &mut (*_9);
|
||||
// _9 = Offset(move _9, const 1usize);
|
||||
// drop((*_11)) -> bb10;
|
||||
// }
|
||||
// bb10 (cleanup): {
|
||||
// _12 = Eq(_9, _10);
|
||||
// switchInt(move _12) -> [false: bb9, otherwise: bb2];
|
||||
// }
|
||||
// bb11: {
|
||||
// _13 = &mut (*_9);
|
||||
// _9 = Offset(move _9, const 1usize);
|
||||
// drop((*_13)) -> [return: bb12, unwind: bb10];
|
||||
// }
|
||||
// bb12: {
|
||||
// _14 = Eq(_9, _10);
|
||||
// switchInt(move _14) -> [false: bb11, otherwise: bb1];
|
||||
// }
|
||||
// bb13: {
|
||||
// _15 = &mut (*_1);
|
||||
// _9 = move _15 as *mut std::string::String (Misc);
|
||||
// _10 = Offset(_9, move _3);
|
||||
// goto -> bb12;
|
||||
// }
|
||||
// bb14: {
|
||||
// goto -> bb13;
|
||||
// }
|
||||
// bb15: {
|
||||
// _2 = SizeOf(std::string::String);
|
||||
// _3 = Len((*_1));
|
||||
// switchInt(move _2) -> [0usize: bb8, otherwise: bb14];
|
||||
// }
|
||||
// END rustc.ptr-real_drop_in_place.[std__string__String].AddMovesForPackedDrops.before.mir
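For orientation, an assumed usage sketch (not part of the test): the MIR above is the drop shim the compiler generates for `[String]`, i.e. the element-by-element loop that `std::ptr::drop_in_place::<[String]>` runs. A user-level call that exercises that glue could look like:

use std::mem::MaybeUninit;
use std::ptr;

fn main() {
    // Two initialized Strings in manually managed storage.
    let mut storage: [MaybeUninit<String>; 2] = [
        MaybeUninit::new(String::from("a")),
        MaybeUninit::new(String::from("b")),
    ];
    // View the storage as `*mut [String]` and run the slice drop glue once; `storage`
    // itself is MaybeUninit, so nothing is dropped a second time.
    let slice: *mut [String] =
        ptr::slice_from_raw_parts_mut(storage.as_mut_ptr() as *mut String, 2);
    unsafe { ptr::drop_in_place(slice) };
}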
|
src/test/ui/async-await/auxiliary/issue-60674.rs (new file, 12 lines)
@ -0,0 +1,12 @@
|
||||
// force-host
|
||||
// no-prefer-dynamic
|
||||
#![crate_type = "proc-macro"]
|
||||
|
||||
extern crate proc_macro;
|
||||
use proc_macro::TokenStream;
|
||||
|
||||
#[proc_macro_attribute]
|
||||
pub fn attr(_args: TokenStream, input: TokenStream) -> TokenStream {
|
||||
println!("{}", input);
|
||||
TokenStream::new()
|
||||
}
|
src/test/ui/async-await/issue-60674.rs (new file, 14 lines)
@ -0,0 +1,14 @@
|
||||
// aux-build:issue-60674.rs
|
||||
// compile-pass
|
||||
// edition:2018
|
||||
#![feature(async_await)]
|
||||
|
||||
// This is a regression test that ensures that `mut` patterns are not lost when provided as input
|
||||
// to a proc macro.
|
||||
|
||||
extern crate issue_60674;
|
||||
|
||||
#[issue_60674::attr]
|
||||
async fn f(mut x: u8) {}
|
||||
|
||||
fn main() {}
|
src/test/ui/async-await/issue-60674.stdout (new file, 1 line)
@ -0,0 +1 @@
|
||||
async fn f(mut x: u8) { }
|
@ -3,7 +3,7 @@ error: struct literals are not allowed here
|
||||
|
|
||||
LL | if let S { x: _x, y: 2 } = S { x: 1, y: 2 } { println!("Ok"); }
|
||||
| ^^^^^^^^^^^^^^^^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | if let S { x: _x, y: 2 } = (S { x: 1, y: 2 }) { println!("Ok"); }
|
||||
| ^ ^
|
||||
@ -19,7 +19,7 @@ error: struct literals are not allowed here
|
||||
|
|
||||
LL | for _ in std::ops::Range { start: 0, end: 10 } {}
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | for _ in (std::ops::Range { start: 0, end: 10 }) {}
|
||||
| ^ ^
|
||||
|
@ -3,8 +3,7 @@
|
||||
|
||||
// compile-flags:-Zborrowck=mir -Zverbose
|
||||
// compile-pass
|
||||
|
||||
#![allow(warnings)]
|
||||
// skip-codegen
|
||||
|
||||
fn foo<'a, 'b>(x: &'a &'b u32) -> &'a u32 {
|
||||
&**x
|
||||
|
src/test/ui/parser/expr-as-stmt.fixed (new file, 40 lines)
@ -0,0 +1,40 @@
|
||||
// run-rustfix
|
||||
#![allow(unused_variables)]
|
||||
#![allow(dead_code)]
|
||||
#![allow(unused_must_use)]
|
||||
|
||||
fn foo() -> i32 {
|
||||
({2}) + {2} //~ ERROR expected expression, found `+`
|
||||
//~^ ERROR mismatched types
|
||||
}
|
||||
|
||||
fn bar() -> i32 {
|
||||
({2}) + 2 //~ ERROR expected expression, found `+`
|
||||
//~^ ERROR mismatched types
|
||||
}
|
||||
|
||||
fn zul() -> u32 {
|
||||
let foo = 3;
|
||||
({ 42 }) + foo; //~ ERROR expected expression, found `+`
|
||||
//~^ ERROR mismatched types
|
||||
32
|
||||
}
|
||||
|
||||
fn baz() -> i32 {
|
||||
({ 3 }) * 3 //~ ERROR type `{integer}` cannot be dereferenced
|
||||
//~^ ERROR mismatched types
|
||||
}
|
||||
|
||||
fn qux(a: Option<u32>, b: Option<u32>) -> bool {
|
||||
(if let Some(x) = a { true } else { false })
|
||||
&& //~ ERROR expected expression
|
||||
if let Some(y) = a { true } else { false }
|
||||
}
|
||||
|
||||
fn moo(x: u32) -> bool {
|
||||
(match x {
|
||||
_ => 1,
|
||||
}) > 0 //~ ERROR expected expression
|
||||
}
|
||||
|
||||
fn main() {}
|
src/test/ui/parser/expr-as-stmt.rs (new file, 40 lines)
@ -0,0 +1,40 @@
|
||||
// run-rustfix
|
||||
#![allow(unused_variables)]
|
||||
#![allow(dead_code)]
|
||||
#![allow(unused_must_use)]
|
||||
|
||||
fn foo() -> i32 {
|
||||
{2} + {2} //~ ERROR expected expression, found `+`
|
||||
//~^ ERROR mismatched types
|
||||
}
|
||||
|
||||
fn bar() -> i32 {
|
||||
{2} + 2 //~ ERROR expected expression, found `+`
|
||||
//~^ ERROR mismatched types
|
||||
}
|
||||
|
||||
fn zul() -> u32 {
|
||||
let foo = 3;
|
||||
{ 42 } + foo; //~ ERROR expected expression, found `+`
|
||||
//~^ ERROR mismatched types
|
||||
32
|
||||
}
|
||||
|
||||
fn baz() -> i32 {
|
||||
{ 3 } * 3 //~ ERROR type `{integer}` cannot be dereferenced
|
||||
//~^ ERROR mismatched types
|
||||
}
|
||||
|
||||
fn qux(a: Option<u32>, b: Option<u32>) -> bool {
|
||||
if let Some(x) = a { true } else { false }
|
||||
&& //~ ERROR expected expression
|
||||
if let Some(y) = a { true } else { false }
|
||||
}
|
||||
|
||||
fn moo(x: u32) -> bool {
|
||||
match x {
|
||||
_ => 1,
|
||||
} > 0 //~ ERROR expected expression
|
||||
}
|
||||
|
||||
fn main() {}
|
src/test/ui/parser/expr-as-stmt.stderr (new file, 92 lines)
@ -0,0 +1,92 @@
|
||||
error: expected expression, found `+`
|
||||
--> $DIR/expr-as-stmt.rs:7:9
|
||||
|
|
||||
LL | {2} + {2}
|
||||
| --- ^ expected expression
|
||||
| |
|
||||
| help: parentheses are required to parse this as an expression: `({2})`
|
||||
|
||||
error: expected expression, found `+`
|
||||
--> $DIR/expr-as-stmt.rs:12:9
|
||||
|
|
||||
LL | {2} + 2
|
||||
| --- ^ expected expression
|
||||
| |
|
||||
| help: parentheses are required to parse this as an expression: `({2})`
|
||||
|
||||
error: expected expression, found `+`
|
||||
--> $DIR/expr-as-stmt.rs:18:12
|
||||
|
|
||||
LL | { 42 } + foo;
|
||||
| ------ ^ expected expression
|
||||
| |
|
||||
| help: parentheses are required to parse this as an expression: `({ 42 })`
|
||||
|
||||
error: expected expression, found `&&`
|
||||
--> $DIR/expr-as-stmt.rs:30:5
|
||||
|
|
||||
LL | if let Some(x) = a { true } else { false }
|
||||
| ------------------------------------------ help: parentheses are required to parse this as an expression: `(if let Some(x) = a { true } else { false })`
|
||||
LL | &&
|
||||
| ^^ expected expression
|
||||
|
||||
error: expected expression, found `>`
|
||||
--> $DIR/expr-as-stmt.rs:37:7
|
||||
|
|
||||
LL | } > 0
|
||||
| ^ expected expression
|
||||
help: parentheses are required to parse this as an expression
|
||||
|
|
||||
LL | (match x {
|
||||
LL | _ => 1,
|
||||
LL | }) > 0
|
||||
|
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/expr-as-stmt.rs:7:6
|
||||
|
|
||||
LL | {2} + {2}
|
||||
| ^ expected (), found integer
|
||||
|
|
||||
= note: expected type `()`
|
||||
found type `{integer}`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/expr-as-stmt.rs:12:6
|
||||
|
|
||||
LL | {2} + 2
|
||||
| ^ expected (), found integer
|
||||
|
|
||||
= note: expected type `()`
|
||||
found type `{integer}`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/expr-as-stmt.rs:18:7
|
||||
|
|
||||
LL | { 42 } + foo;
|
||||
| ^^ expected (), found integer
|
||||
|
|
||||
= note: expected type `()`
|
||||
found type `{integer}`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/expr-as-stmt.rs:24:7
|
||||
|
|
||||
LL | { 3 } * 3
|
||||
| ^ expected (), found integer
|
||||
|
|
||||
= note: expected type `()`
|
||||
found type `{integer}`
|
||||
|
||||
error[E0614]: type `{integer}` cannot be dereferenced
|
||||
--> $DIR/expr-as-stmt.rs:24:11
|
||||
|
|
||||
LL | { 3 } * 3
|
||||
| ----- ^^^
|
||||
| |
|
||||
| help: parentheses are required to parse this as an expression: `({ 3 })`
|
||||
|
||||
error: aborting due to 10 previous errors
|
||||
|
||||
Some errors have detailed explanations: E0308, E0614.
|
||||
For more information about an error, try `rustc --explain E0308`.
|
@ -1,7 +1,7 @@
|
||||
fn main() {
|
||||
|
||||
match 0 {
|
||||
let _ = match 0 {
|
||||
0 => {
|
||||
0
|
||||
} + 5 //~ ERROR expected pattern, found `+`
|
||||
}
|
||||
};
|
||||
}
|
||||
|
@ -3,6 +3,12 @@ error: expected pattern, found `+`
|
||||
|
|
||||
LL | } + 5
|
||||
| ^ expected pattern
|
||||
help: parentheses are required to parse this as an expression
|
||||
|
|
||||
LL | 0 => ({
|
||||
LL | 0
|
||||
LL | }) + 5
|
||||
|
|
||||
|
||||
error: aborting due to previous error
|
||||
|
||||
|
@ -6,7 +6,7 @@ LL | for x in Foo {
|
||||
LL | | x: 3
|
||||
LL | | }.hi() {
|
||||
| |_____^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | for x in (Foo {
|
||||
LL | x: 3
|
||||
|
@ -6,7 +6,7 @@ LL | if Foo {
|
||||
LL | | x: 3
|
||||
LL | | }.hi() {
|
||||
| |_____^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | if (Foo {
|
||||
LL | x: 3
|
||||
|
@ -6,7 +6,7 @@ LL | match Foo {
|
||||
LL | | x: 3
|
||||
LL | | } {
|
||||
| |_____^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | match (Foo {
|
||||
LL | x: 3
|
||||
|
@ -6,7 +6,7 @@ LL | while Foo {
|
||||
LL | | x: 3
|
||||
LL | | }.hi() {
|
||||
| |_____^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | while (Foo {
|
||||
LL | x: 3
|
||||
|
@ -6,7 +6,7 @@ LL | while || Foo {
|
||||
LL | | x: 3
|
||||
LL | | }.hi() {
|
||||
| |_____^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | while || (Foo {
|
||||
LL | x: 3
|
||||
|
@ -3,7 +3,7 @@ error: struct literals are not allowed here
|
||||
|
|
||||
LL | if x == E::I { field1: true, field2: 42 } {}
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | if x == (E::I { field1: true, field2: 42 }) {}
|
||||
| ^ ^
|
||||
@ -13,7 +13,7 @@ error: struct literals are not allowed here
|
||||
|
|
||||
LL | if x == E::V { field: false } {}
|
||||
| ^^^^^^^^^^^^^^^^^^^^^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | if x == (E::V { field: false }) {}
|
||||
| ^ ^
|
||||
@ -23,7 +23,7 @@ error: struct literals are not allowed here
|
||||
|
|
||||
LL | if x == E::J { field: -42 } {}
|
||||
| ^^^^^^^^^^^^^^^^^^^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | if x == (E::J { field: -42 }) {}
|
||||
| ^ ^
|
||||
@ -33,7 +33,7 @@ error: struct literals are not allowed here
|
||||
|
|
||||
LL | if x == E::K { field: "" } {}
|
||||
| ^^^^^^^^^^^^^^^^^^
|
||||
help: surround the struct literal with parenthesis
|
||||
help: surround the struct literal with parentheses
|
||||
|
|
||||
LL | if x == (E::K { field: "" }) {}
|
||||
| ^ ^
|
||||
|
@ -7,4 +7,3 @@ edition = "2018"
|
||||
[dependencies]
|
||||
toml = "0.4"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_derive = "1.0"
|
||||
|
@ -11,9 +11,8 @@ filetime = "0.2"
|
||||
getopts = "0.2"
|
||||
log = "0.4"
|
||||
regex = "1.0"
|
||||
serde = "1.0"
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
serde_json = "1.0"
|
||||
serde_derive = "1.0"
|
||||
rustfix = "0.4.1"
|
||||
lazy_static = "1.0"
|
||||
walkdir = "2"
|
||||
|
@ -7,6 +7,8 @@ use std::io::BufReader;
|
||||
use std::path::Path;
|
||||
use std::str::FromStr;
|
||||
|
||||
use log::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum ErrorKind {
|
||||
Help,
|
||||
|
@ -4,6 +4,8 @@ use std::io::prelude::*;
|
||||
use std::io::BufReader;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use log::*;
|
||||
|
||||
use crate::common::{self, CompareMode, Config, Mode};
|
||||
use crate::util;
|
||||
|
||||
|
@ -3,6 +3,7 @@
|
||||
|
||||
use crate::errors::{Error, ErrorKind};
|
||||
use crate::runtest::ProcRes;
|
||||
use serde::Deserialize;
|
||||
use serde_json;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str::FromStr;
|
||||
|
@ -3,14 +3,6 @@
|
||||
#![feature(vec_remove_item)]
|
||||
#![deny(warnings, rust_2018_idioms)]
|
||||
|
||||
#[cfg(unix)]
|
||||
extern crate libc;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
#[macro_use]
|
||||
extern crate lazy_static;
|
||||
#[macro_use]
|
||||
extern crate serde_derive;
|
||||
extern crate test;
|
||||
|
||||
use crate::common::CompareMode;
|
||||
@ -30,6 +22,7 @@ use crate::util::logv;
|
||||
use walkdir::WalkDir;
|
||||
use env_logger;
|
||||
use getopts;
|
||||
use log::*;
|
||||
|
||||
use self::header::{EarlyProps, Ignore};
|
||||
|
||||
|
@ -29,6 +29,9 @@ use std::path::{Path, PathBuf};
|
||||
use std::process::{Child, Command, ExitStatus, Output, Stdio};
|
||||
use std::str;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
use log::*;
|
||||
|
||||
use crate::extract_gdb_version;
|
||||
use crate::is_android_gdb_target;
|
||||
|
||||
|
@ -3,6 +3,8 @@ use std::env;
|
||||
use std::path::PathBuf;
|
||||
use crate::common::Config;
|
||||
|
||||
use log::*;
|
||||
|
||||
/// Conversion table from triple OS name to Rust SYSNAME
|
||||
const OS_TABLE: &'static [(&'static str, &'static str)] = &[
|
||||
("android", "android"),
|
||||
|
@ -6,6 +6,5 @@ edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
regex = "1"
|
||||
serde = "1.0.8"
|
||||
serde_derive = "1.0.8"
|
||||
serde = { version = "1.0.8", features = ["derive"] }
|
||||
serde_json = "1.0.2"
|
||||
|
@ -5,7 +5,7 @@ use std::fs;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
use serde_derive::Deserialize;
|
||||
use serde::Deserialize;
|
||||
use serde_json;
|
||||
|
||||
const LICENSES: &[&str] = &[
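The Cargo.toml changes above switch the build tools from a separate `serde_derive` crate to serde's own `derive` feature; as a brief hedged sketch (struct and field names are made up), downstream usage stays a one-line import:

// Illustration only; requires `serde = { version = "1.0", features = ["derive"] }` and
// `serde_json = "1.0"` in Cargo.toml. The derive macro is re-exported by `serde` itself,
// so no `use serde_derive::Deserialize;` or `#[macro_use] extern crate serde_derive;`
// is needed any more.
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Package {
    name: String,
    version: String,
}

fn main() {
    let pkg: Package =
        serde_json::from_str(r#"{ "name": "demo", "version": "1.0.0" }"#).unwrap();
    println!("{:?}", pkg);
}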