Merge pull request #3 from rust-lang/master
update from origin 2020-06-15
This commit is contained in:
commit
395256a5dd
14
.github/workflows/ci.yml
vendored
14
.github/workflows/ci.yml
vendored
@ -102,9 +102,6 @@ jobs:
|
||||
- name: install MSYS2
|
||||
run: src/ci/scripts/install-msys2.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
- name: install MSYS2 packages
|
||||
run: src/ci/scripts/install-msys2-packages.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
- name: install MinGW
|
||||
run: src/ci/scripts/install-mingw.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
@ -212,9 +209,6 @@ jobs:
|
||||
- name: install MSYS2
|
||||
run: src/ci/scripts/install-msys2.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
- name: install MSYS2 packages
|
||||
run: src/ci/scripts/install-msys2-packages.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
- name: install MinGW
|
||||
run: src/ci/scripts/install-mingw.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
@ -434,11 +428,6 @@ jobs:
|
||||
NO_DEBUG_ASSERTIONS: 1
|
||||
NO_LLVM_ASSERTIONS: 1
|
||||
os: windows-latest-xl
|
||||
- name: x86_64-msvc-aux
|
||||
env:
|
||||
RUST_CHECK_TARGET: check-aux EXCLUDE_CARGO=1
|
||||
RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc"
|
||||
os: windows-latest-xl
|
||||
- name: x86_64-msvc-cargo
|
||||
env:
|
||||
SCRIPT: python x.py test src/tools/cargotest src/tools/cargo
|
||||
@ -564,9 +553,6 @@ jobs:
|
||||
- name: install MSYS2
|
||||
run: src/ci/scripts/install-msys2.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
- name: install MSYS2 packages
|
||||
run: src/ci/scripts/install-msys2-packages.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
- name: install MinGW
|
||||
run: src/ci/scripts/install-mingw.sh
|
||||
if: success() && !env.SKIP_JOB
|
||||
|
10
Cargo.lock
10
Cargo.lock
@ -282,7 +282,7 @@ checksum = "716960a18f978640f25101b5cbf1c6f6b0d3192fab36a2d98ca96f0ecbe41010"
|
||||
|
||||
[[package]]
|
||||
name = "cargo"
|
||||
version = "0.46.0"
|
||||
version = "0.47.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"atty",
|
||||
@ -1434,9 +1434,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hermit-abi"
|
||||
version = "0.1.13"
|
||||
version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "91780f809e750b0a89f5544be56617ff6b1227ee485bcb06ebe10cdf89bd3b71"
|
||||
checksum = "b9586eedd4ce6b3c498bc3b4dd92fc9f11166aa908a914071953768066c67909"
|
||||
dependencies = [
|
||||
"compiler_builtins",
|
||||
"libc",
|
||||
@ -1848,9 +1848,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "libgit2-sys"
|
||||
version = "0.12.5+1.0.0"
|
||||
version = "0.12.7+1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3eadeec65514971355bf7134967a543f71372f35b53ac6c7143e7bd157f07535"
|
||||
checksum = "bcd07968649bcb7b9351ecfde53ca4d27673cccfdf57c84255ec18710f3153e0"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
|
@ -69,7 +69,7 @@
|
||||
# the same format as above, but since these targets are experimental, they are
|
||||
# not built by default and the experimental Rust compilation targets that depend
|
||||
# on them will not work unless the user opts in to building them.
|
||||
#experimental-targets = ""
|
||||
#experimental-targets = "AVR"
|
||||
|
||||
# Cap the number of parallel linker invocations when compiling LLVM.
|
||||
# This can be useful when building LLVM with debug info, which significantly
|
||||
|
@ -52,6 +52,8 @@ pub trait Step: 'static + Clone + Debug + PartialEq + Eq + Hash {
|
||||
/// it's been assembled.
|
||||
type Output: Clone;
|
||||
|
||||
/// Whether this step is run by default as part of its respective phase.
|
||||
/// `true` here can still be overwritten by `should_run` calling `default_condition`.
|
||||
const DEFAULT: bool = false;
|
||||
|
||||
/// If true, then this rule should be skipped if --target was specified, but --host was not
|
||||
@ -371,7 +373,6 @@ impl<'a> Builder<'a> {
|
||||
test::UiFullDeps,
|
||||
test::Rustdoc,
|
||||
test::Pretty,
|
||||
test::RunPassValgrindPretty,
|
||||
test::Crate,
|
||||
test::CrateLibrustc,
|
||||
test::CrateRustdoc,
|
||||
|
@ -983,7 +983,13 @@ pub fn stream_cargo(
|
||||
for line in stdout.lines() {
|
||||
let line = t!(line);
|
||||
match serde_json::from_str::<CargoMessage<'_>>(&line) {
|
||||
Ok(msg) => cb(msg),
|
||||
Ok(msg) => {
|
||||
if builder.config.json_output {
|
||||
// Forward JSON to stdout.
|
||||
println!("{}", line);
|
||||
}
|
||||
cb(msg)
|
||||
}
|
||||
// If this was informational, just print it out and continue
|
||||
Err(_) => println!("{}", line),
|
||||
}
|
||||
|
@ -619,19 +619,21 @@ impl Step for DebuggerScripts {
|
||||
cp_debugger_script("natvis/libcore.natvis");
|
||||
cp_debugger_script("natvis/libstd.natvis");
|
||||
} else {
|
||||
cp_debugger_script("debugger_pretty_printers_common.py");
|
||||
cp_debugger_script("rust_types.py");
|
||||
|
||||
// gdb debugger scripts
|
||||
builder.install(&builder.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), 0o755);
|
||||
builder.install(&builder.src.join("src/etc/rust-gdbgui"), &sysroot.join("bin"), 0o755);
|
||||
|
||||
cp_debugger_script("gdb_load_rust_pretty_printers.py");
|
||||
cp_debugger_script("gdb_rust_pretty_printing.py");
|
||||
cp_debugger_script("gdb_lookup.py");
|
||||
cp_debugger_script("gdb_providers.py");
|
||||
|
||||
// lldb debugger scripts
|
||||
builder.install(&builder.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), 0o755);
|
||||
|
||||
cp_debugger_script("lldb_rust_formatters.py");
|
||||
cp_debugger_script("lldb_lookup.py");
|
||||
cp_debugger_script("lldb_providers.py");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -6,12 +6,6 @@ Q := @
|
||||
BOOTSTRAP_ARGS :=
|
||||
endif
|
||||
|
||||
ifdef EXCLUDE_CARGO
|
||||
AUX_ARGS :=
|
||||
else
|
||||
AUX_ARGS := src/tools/cargo src/tools/cargotest
|
||||
endif
|
||||
|
||||
BOOTSTRAP := $(CFG_PYTHON) $(CFG_SRC_DIR)src/bootstrap/bootstrap.py
|
||||
|
||||
all:
|
||||
@ -48,8 +42,8 @@ check:
|
||||
$(Q)$(BOOTSTRAP) test $(BOOTSTRAP_ARGS)
|
||||
check-aux:
|
||||
$(Q)$(BOOTSTRAP) test \
|
||||
src/test/run-pass-valgrind/pretty \
|
||||
$(AUX_ARGS) \
|
||||
src/tools/cargo \
|
||||
src/tools/cargotest \
|
||||
$(BOOTSTRAP_ARGS)
|
||||
check-bootstrap:
|
||||
$(Q)$(CFG_PYTHON) $(CFG_SRC_DIR)src/bootstrap/bootstrap_test.py
|
||||
|
@ -144,7 +144,7 @@ impl Step for Llvm {
|
||||
|
||||
let llvm_exp_targets = match builder.config.llvm_experimental_targets {
|
||||
Some(ref s) => s,
|
||||
None => "",
|
||||
None => "AVR",
|
||||
};
|
||||
|
||||
let assertions = if builder.config.llvm_assertions { "ON" } else { "OFF" };
|
||||
|
@ -154,6 +154,7 @@ impl Step for Cargotest {
|
||||
fn run(self, builder: &Builder<'_>) {
|
||||
let compiler = builder.compiler(self.stage, self.host);
|
||||
builder.ensure(compile::Rustc { compiler, target: compiler.host });
|
||||
let cargo = builder.ensure(tool::Cargo { compiler, target: compiler.host });
|
||||
|
||||
// Note that this is a short, cryptic, and not scoped directory name. This
|
||||
// is currently to minimize the length of path on Windows where we otherwise
|
||||
@ -165,7 +166,7 @@ impl Step for Cargotest {
|
||||
let mut cmd = builder.tool_cmd(Tool::CargoTest);
|
||||
try_run(
|
||||
builder,
|
||||
cmd.arg(&builder.initial_cargo)
|
||||
cmd.arg(&cargo)
|
||||
.arg(&out_dir)
|
||||
.env("RUSTC", builder.rustc(compiler))
|
||||
.env("RUSTDOC", builder.rustdoc(compiler)),
|
||||
@ -553,7 +554,7 @@ impl Step for Clippy {
|
||||
|
||||
builder.add_rustc_lib_path(compiler, &mut cargo);
|
||||
|
||||
try_run(builder, &mut cargo.into());
|
||||
builder.run(&mut cargo.into());
|
||||
}
|
||||
}
|
||||
|
||||
@ -929,13 +930,6 @@ host_test!(UiFullDeps { path: "src/test/ui-fulldeps", mode: "ui", suite: "ui-ful
|
||||
host_test!(Rustdoc { path: "src/test/rustdoc", mode: "rustdoc", suite: "rustdoc" });
|
||||
|
||||
host_test!(Pretty { path: "src/test/pretty", mode: "pretty", suite: "pretty" });
|
||||
test!(RunPassValgrindPretty {
|
||||
path: "src/test/run-pass-valgrind/pretty",
|
||||
mode: "pretty",
|
||||
suite: "run-pass-valgrind",
|
||||
default: false,
|
||||
host: true
|
||||
});
|
||||
|
||||
default_test!(RunMake { path: "src/test/run-make", mode: "run-make", suite: "run-make" });
|
||||
|
||||
|
@ -595,6 +595,7 @@ macro_rules! tool_extended {
|
||||
$toolstate:ident,
|
||||
$path:expr,
|
||||
$tool_name:expr,
|
||||
stable = $stable:expr,
|
||||
$extra_deps:block;)+) => {
|
||||
$(
|
||||
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
|
||||
@ -606,17 +607,22 @@ macro_rules! tool_extended {
|
||||
|
||||
impl Step for $name {
|
||||
type Output = Option<PathBuf>;
|
||||
const DEFAULT: bool = true;
|
||||
const DEFAULT: bool = true; // Overwritten below
|
||||
const ONLY_HOSTS: bool = true;
|
||||
|
||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||
let builder = run.builder;
|
||||
run.path($path).default_condition(
|
||||
builder.config.extended
|
||||
&& builder.config.tools.as_ref().map_or(true, |tools| {
|
||||
tools.iter().any(|tool| match tool.as_ref() {
|
||||
"clippy" => $tool_name == "clippy-driver",
|
||||
x => $tool_name == x,
|
||||
&& builder.config.tools.as_ref().map_or(
|
||||
// By default, on nightly/dev enable all tools, else only
|
||||
// build stable tools.
|
||||
$stable || builder.build.unstable_features(),
|
||||
// If `tools` is set, search list for this tool.
|
||||
|tools| {
|
||||
tools.iter().any(|tool| match tool.as_ref() {
|
||||
"clippy" => $tool_name == "clippy-driver",
|
||||
x => $tool_name == x,
|
||||
})
|
||||
}),
|
||||
)
|
||||
@ -652,12 +658,12 @@ macro_rules! tool_extended {
|
||||
// Note: tools need to be also added to `Builder::get_step_descriptions` in `build.rs`
|
||||
// to make `./x.py build <tool>` work.
|
||||
tool_extended!((self, builder),
|
||||
Cargofmt, rustfmt, "src/tools/rustfmt", "cargo-fmt", {};
|
||||
CargoClippy, clippy, "src/tools/clippy", "cargo-clippy", {};
|
||||
Clippy, clippy, "src/tools/clippy", "clippy-driver", {};
|
||||
Miri, miri, "src/tools/miri", "miri", {};
|
||||
CargoMiri, miri, "src/tools/miri/cargo-miri", "cargo-miri", {};
|
||||
Rls, rls, "src/tools/rls", "rls", {
|
||||
Cargofmt, rustfmt, "src/tools/rustfmt", "cargo-fmt", stable=true, {};
|
||||
CargoClippy, clippy, "src/tools/clippy", "cargo-clippy", stable=true, {};
|
||||
Clippy, clippy, "src/tools/clippy", "clippy-driver", stable=true, {};
|
||||
Miri, miri, "src/tools/miri", "miri", stable=false, {};
|
||||
CargoMiri, miri, "src/tools/miri/cargo-miri", "cargo-miri", stable=false, {};
|
||||
Rls, rls, "src/tools/rls", "rls", stable=true, {
|
||||
builder.ensure(Clippy {
|
||||
compiler: self.compiler,
|
||||
target: self.target,
|
||||
@ -665,7 +671,7 @@ tool_extended!((self, builder),
|
||||
});
|
||||
self.extra_features.push("clippy".to_owned());
|
||||
};
|
||||
Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", {};
|
||||
Rustfmt, rustfmt, "src/tools/rustfmt", "rustfmt", stable=true, {};
|
||||
);
|
||||
|
||||
impl<'a> Builder<'a> {
|
||||
|
@ -142,10 +142,6 @@ jobs:
|
||||
# FIXME(#59637)
|
||||
NO_DEBUG_ASSERTIONS: 1
|
||||
NO_LLVM_ASSERTIONS: 1
|
||||
# MSVC aux tests
|
||||
x86_64-msvc-aux:
|
||||
RUST_CHECK_TARGET: check-aux EXCLUDE_CARGO=1
|
||||
INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
|
||||
x86_64-msvc-cargo:
|
||||
SCRIPT: python x.py test src/tools/cargotest src/tools/cargo
|
||||
INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-lld
|
||||
|
@ -82,10 +82,6 @@ steps:
|
||||
displayName: Install msys2
|
||||
condition: and(succeeded(), not(variables.SKIP_JOB))
|
||||
|
||||
- bash: src/ci/scripts/install-msys2-packages.sh
|
||||
displayName: Install msys2 packages
|
||||
condition: and(succeeded(), not(variables.SKIP_JOB))
|
||||
|
||||
- bash: src/ci/scripts/install-mingw.sh
|
||||
displayName: Install MinGW
|
||||
condition: and(succeeded(), not(variables.SKIP_JOB))
|
||||
|
@ -3,7 +3,7 @@
|
||||
#
|
||||
# Versions of the toolchain components are configurable in `musl-cross-make/Makefile` and
|
||||
# musl unlike GLIBC is forward compatible so upgrading it shouldn't break old distributions.
|
||||
# Right now we have: Binutils 2.27, GCC 6.4.0, musl 1.1.22.
|
||||
# Right now we have: Binutils 2.31.1, GCC 9.2.0, musl 1.1.24.
|
||||
set -ex
|
||||
|
||||
hide_output() {
|
||||
@ -33,11 +33,13 @@ shift
|
||||
# Apparently applying `-fPIC` everywhere allows them to link successfully.
|
||||
export CFLAGS="-fPIC $CFLAGS"
|
||||
|
||||
git clone https://github.com/richfelker/musl-cross-make -b v0.9.8
|
||||
git clone https://github.com/richfelker/musl-cross-make # -b v0.9.9
|
||||
cd musl-cross-make
|
||||
# A few commits ahead of v0.9.9 to include the cowpatch fix:
|
||||
git checkout a54eb56f33f255dfca60be045f12a5cfaf5a72a9
|
||||
|
||||
hide_output make -j$(nproc) TARGET=$TARGET
|
||||
hide_output make install TARGET=$TARGET OUTPUT=$OUTPUT
|
||||
hide_output make -j$(nproc) TARGET=$TARGET MUSL_VER=1.1.24
|
||||
hide_output make install TARGET=$TARGET MUSL_VER=1.1.24 OUTPUT=$OUTPUT
|
||||
|
||||
cd -
|
||||
|
||||
|
@ -24,7 +24,7 @@ shift
|
||||
# Apparently applying `-fPIC` everywhere allows them to link successfully.
|
||||
export CFLAGS="-fPIC $CFLAGS"
|
||||
|
||||
MUSL=musl-1.1.22
|
||||
MUSL=musl-1.1.24
|
||||
|
||||
# may have been downloaded in a previous run
|
||||
if [ ! -d $MUSL ]; then
|
||||
|
@ -27,6 +27,9 @@ ENV PATH=$PATH:/emsdk-portable
|
||||
ENV PATH=$PATH:/emsdk-portable/upstream/emscripten/
|
||||
ENV PATH=$PATH:/emsdk-portable/node/12.9.1_64bit/bin/
|
||||
ENV BINARYEN_ROOT=/emsdk-portable/upstream/
|
||||
ENV EMSDK=/emsdk-portable
|
||||
ENV EM_CONFIG=/emsdk-portable/.emscripten
|
||||
ENV EM_CACHE=/emsdk-portable/upstream/emscripten/cache
|
||||
|
||||
ENV TARGETS=wasm32-unknown-emscripten
|
||||
|
||||
|
@ -147,10 +147,6 @@ x--expand-yaml-anchors--remove:
|
||||
run: src/ci/scripts/install-msys2.sh
|
||||
<<: *step
|
||||
|
||||
- name: install MSYS2 packages
|
||||
run: src/ci/scripts/install-msys2-packages.sh
|
||||
<<: *step
|
||||
|
||||
- name: install MinGW
|
||||
run: src/ci/scripts/install-mingw.sh
|
||||
<<: *step
|
||||
@ -496,12 +492,6 @@ jobs:
|
||||
NO_LLVM_ASSERTIONS: 1
|
||||
<<: *job-windows-xl
|
||||
|
||||
- name: x86_64-msvc-aux
|
||||
env:
|
||||
RUST_CHECK_TARGET: check-aux EXCLUDE_CARGO=1
|
||||
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc
|
||||
<<: *job-windows-xl
|
||||
|
||||
- name: x86_64-msvc-cargo
|
||||
env:
|
||||
SCRIPT: python x.py test src/tools/cargotest src/tools/cargo
|
||||
|
@ -1,27 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
|
||||
source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
|
||||
|
||||
if isWindows; then
|
||||
pacman -S --noconfirm --needed base-devel ca-certificates make diffutils tar \
|
||||
binutils
|
||||
|
||||
# Detect the native Python version installed on the agent. On GitHub
|
||||
# Actions, the C:\hostedtoolcache\windows\Python directory contains a
|
||||
# subdirectory for each installed Python version.
|
||||
#
|
||||
# The -V flag of the sort command sorts the input by version number.
|
||||
native_python_version="$(ls /c/hostedtoolcache/windows/Python | sort -Vr | head -n 1)"
|
||||
|
||||
# Make sure we use the native python interpreter instead of some msys equivalent
|
||||
# one way or another. The msys interpreters seem to have weird path conversions
|
||||
# baked in which break LLVM's build system one way or another, so let's use the
|
||||
# native version which keeps everything as native as possible.
|
||||
python_home="/c/hostedtoolcache/windows/Python/${native_python_version}/x64"
|
||||
cp "${python_home}/python.exe" "${python_home}/python3.exe"
|
||||
ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64"
|
||||
ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64\\Scripts"
|
||||
fi
|
@ -1,10 +1,6 @@
|
||||
#!/bin/bash
|
||||
# Download and install MSYS2, needed primarily for the test suite (run-make) but
|
||||
# also used by the MinGW toolchain for assembling things.
|
||||
#
|
||||
# FIXME: we should probe the default azure image and see if we can use the MSYS2
|
||||
# toolchain there. (if there's even one there). For now though this gets the job
|
||||
# done.
|
||||
|
||||
set -euo pipefail
|
||||
IFS=$'\n\t'
|
||||
@ -12,17 +8,26 @@ IFS=$'\n\t'
|
||||
source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
|
||||
|
||||
if isWindows; then
|
||||
# Pre-followed the api/v2 URL to the CDN since the API can be a bit flakey
|
||||
curl -sSL https://packages.chocolatey.org/msys2.20190524.0.0.20191030.nupkg > \
|
||||
msys2.nupkg
|
||||
curl -sSL https://packages.chocolatey.org/chocolatey-core.extension.1.3.5.1.nupkg > \
|
||||
chocolatey-core.extension.nupkg
|
||||
choco install -s . msys2 \
|
||||
--params="/InstallDir:$(ciCheckoutPath)/msys2 /NoPath" -y --no-progress
|
||||
rm msys2.nupkg chocolatey-core.extension.nupkg
|
||||
mkdir -p "$(ciCheckoutPath)/msys2/home/${USERNAME}"
|
||||
ciCommandAddPath "$(ciCheckoutPath)/msys2/usr/bin"
|
||||
msys2Path="c:/msys64"
|
||||
mkdir -p "${msys2Path}/home/${USERNAME}"
|
||||
ciCommandAddPath "${msys2Path}/usr/bin"
|
||||
|
||||
echo "switching shell to use our own bash"
|
||||
ciCommandSetEnv CI_OVERRIDE_SHELL "$(ciCheckoutPath)/msys2/usr/bin/bash.exe"
|
||||
ciCommandSetEnv CI_OVERRIDE_SHELL "${msys2Path}/usr/bin/bash.exe"
|
||||
|
||||
# Detect the native Python version installed on the agent. On GitHub
|
||||
# Actions, the C:\hostedtoolcache\windows\Python directory contains a
|
||||
# subdirectory for each installed Python version.
|
||||
#
|
||||
# The -V flag of the sort command sorts the input by version number.
|
||||
native_python_version="$(ls /c/hostedtoolcache/windows/Python | sort -Vr | head -n 1)"
|
||||
|
||||
# Make sure we use the native python interpreter instead of some msys equivalent
|
||||
# one way or another. The msys interpreters seem to have weird path conversions
|
||||
# baked in which break LLVM's build system one way or another, so let's use the
|
||||
# native version which keeps everything as native as possible.
|
||||
python_home="/c/hostedtoolcache/windows/Python/${native_python_version}/x64"
|
||||
cp "${python_home}/python.exe" "${python_home}/python3.exe"
|
||||
ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64"
|
||||
ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64\\Scripts"
|
||||
fi
|
||||
|
@ -416,7 +416,7 @@ without including it in your main documentation. For example, you could write th
|
||||
`lib.rs` to test your README as part of your doctests:
|
||||
|
||||
```rust,ignore
|
||||
#![feature(extern_doc)]
|
||||
#![feature(external_doc)]
|
||||
|
||||
#[doc(include="../README.md")]
|
||||
#[cfg(doctest)]
|
||||
|
@ -201,7 +201,7 @@ fn mul(a: u64, b: u64) -> u128 {
|
||||
);
|
||||
}
|
||||
|
||||
(hi as u128) << 64 + lo as u128
|
||||
((hi as u128) << 64) + lo as u128
|
||||
}
|
||||
```
|
||||
|
||||
@ -382,7 +382,9 @@ The macro will initially be supported only on ARM, AArch64, x86, x86-64 and RISC
|
||||
|
||||
The assembler template uses the same syntax as [format strings][format-syntax] (i.e. placeholders are specified by curly braces). The corresponding arguments are accessed in order, by index, or by name. However, implicit named arguments (introduced by [RFC #2795][rfc-2795]) are not supported.
|
||||
|
||||
As with format strings, named arguments must appear after positional arguments. Explicit register operands must appear at the end of the operand list, after any named arguments if any. Explicit register operands cannot be used by placeholders in the template string. All other operands must appear at least once in the template string, otherwise a compiler error is generated.
|
||||
As with format strings, named arguments must appear after positional arguments. Explicit register operands must appear at the end of the operand list, after named arguments if any.
|
||||
|
||||
Explicit register operands cannot be used by placeholders in the template string. All other named and positional operands must appear at least once in the template string, otherwise a compiler error is generated.
|
||||
|
||||
The exact assembly code syntax is target-specific and opaque to the compiler except for the way operands are substituted into the template string to form the code passed to the assembler.
|
||||
|
||||
|
@ -1,401 +0,0 @@
|
||||
"""
|
||||
This module provides an abstraction layer over common Rust pretty printing
|
||||
functionality needed by both GDB and LLDB.
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
# Type codes that indicate the kind of type as it appears in DWARF debug
|
||||
# information. This code alone is not sufficient to determine the Rust type.
|
||||
# For example structs, tuples, fat pointers, or enum variants will all have
|
||||
# DWARF_TYPE_CODE_STRUCT.
|
||||
DWARF_TYPE_CODE_STRUCT = 1
|
||||
DWARF_TYPE_CODE_UNION = 2
|
||||
DWARF_TYPE_CODE_PTR = 3
|
||||
DWARF_TYPE_CODE_ARRAY = 4
|
||||
DWARF_TYPE_CODE_ENUM = 5
|
||||
|
||||
# These constants specify the most specific kind of type that could be
|
||||
# determined for a given value.
|
||||
TYPE_KIND_UNKNOWN = -1
|
||||
TYPE_KIND_EMPTY = 0
|
||||
TYPE_KIND_SLICE = 1
|
||||
TYPE_KIND_REGULAR_STRUCT = 2
|
||||
TYPE_KIND_TUPLE = 3
|
||||
TYPE_KIND_TUPLE_STRUCT = 4
|
||||
TYPE_KIND_CSTYLE_VARIANT = 5
|
||||
TYPE_KIND_TUPLE_VARIANT = 6
|
||||
TYPE_KIND_STRUCT_VARIANT = 7
|
||||
TYPE_KIND_STR_SLICE = 8
|
||||
TYPE_KIND_STD_VEC = 9
|
||||
TYPE_KIND_STD_STRING = 10
|
||||
TYPE_KIND_REGULAR_ENUM = 11
|
||||
TYPE_KIND_COMPRESSED_ENUM = 12
|
||||
TYPE_KIND_SINGLETON_ENUM = 13
|
||||
TYPE_KIND_CSTYLE_ENUM = 14
|
||||
TYPE_KIND_PTR = 15
|
||||
TYPE_KIND_FIXED_SIZE_VEC = 16
|
||||
TYPE_KIND_REGULAR_UNION = 17
|
||||
TYPE_KIND_OS_STRING = 18
|
||||
TYPE_KIND_STD_VECDEQUE = 19
|
||||
TYPE_KIND_STD_BTREESET = 20
|
||||
TYPE_KIND_STD_BTREEMAP = 21
|
||||
|
||||
ENCODED_ENUM_PREFIX = "RUST$ENCODED$ENUM$"
|
||||
ENUM_DISR_FIELD_NAME = "RUST$ENUM$DISR"
|
||||
|
||||
# Slice related constants
|
||||
SLICE_FIELD_NAME_DATA_PTR = "data_ptr"
|
||||
SLICE_FIELD_NAME_LENGTH = "length"
|
||||
SLICE_FIELD_NAMES = [SLICE_FIELD_NAME_DATA_PTR, SLICE_FIELD_NAME_LENGTH]
|
||||
|
||||
# std::Vec<> related constants
|
||||
STD_VEC_FIELD_NAME_LENGTH = "len"
|
||||
STD_VEC_FIELD_NAME_BUF = "buf"
|
||||
STD_VEC_FIELD_NAMES = [STD_VEC_FIELD_NAME_BUF,
|
||||
STD_VEC_FIELD_NAME_LENGTH]
|
||||
|
||||
# std::collections::VecDeque<> related constants
|
||||
STD_VECDEQUE_FIELD_NAME_TAIL = "tail"
|
||||
STD_VECDEQUE_FIELD_NAME_HEAD = "head"
|
||||
STD_VECDEQUE_FIELD_NAME_BUF = "buf"
|
||||
STD_VECDEQUE_FIELD_NAMES = [STD_VECDEQUE_FIELD_NAME_TAIL,
|
||||
STD_VECDEQUE_FIELD_NAME_HEAD,
|
||||
STD_VECDEQUE_FIELD_NAME_BUF]
|
||||
|
||||
# std::collections::BTreeSet<> related constants
|
||||
STD_BTREESET_FIELD_NAMES = ["map"]
|
||||
|
||||
# std::collections::BTreeMap<> related constants
|
||||
STD_BTREEMAP_FIELD_NAMES = ["root", "length"]
|
||||
|
||||
# std::String related constants
|
||||
STD_STRING_FIELD_NAMES = ["vec"]
|
||||
|
||||
# std::ffi::OsString related constants
|
||||
OS_STRING_FIELD_NAMES = ["inner"]
|
||||
|
||||
|
||||
class Type(object):
|
||||
"""
|
||||
This class provides a common interface for type-oriented operations.
|
||||
Sub-classes are supposed to wrap a debugger-specific type-object and
|
||||
provide implementations for the abstract methods in this class.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
self.__type_kind = None
|
||||
|
||||
def get_unqualified_type_name(self):
|
||||
"""
|
||||
Implementations of this method should return the unqualified name of the
|
||||
type-object they are wrapping. Some examples:
|
||||
|
||||
'int' -> 'int'
|
||||
'std::vec::Vec<std::string::String>' -> 'Vec<std::string::String>'
|
||||
'&std::option::Option<std::string::String>' -> '&std::option::Option<std::string::String>'
|
||||
|
||||
As you can see, type arguments stay fully qualified.
|
||||
"""
|
||||
raise NotImplementedError("Override this method")
|
||||
|
||||
def get_dwarf_type_kind(self):
|
||||
"""
|
||||
Implementations of this method should return the correct
|
||||
DWARF_TYPE_CODE_* value for the wrapped type-object.
|
||||
"""
|
||||
raise NotImplementedError("Override this method")
|
||||
|
||||
def get_fields(self):
|
||||
"""
|
||||
Implementations of this method should return a list of field-objects of
|
||||
this type. For Rust-enums (i.e. with DWARF_TYPE_CODE_UNION) these field-
|
||||
objects represent the variants of the enum. Field-objects must have a
|
||||
`name` attribute that gives their name as specified in DWARF.
|
||||
"""
|
||||
assert ((self.get_dwarf_type_kind() == DWARF_TYPE_CODE_STRUCT) or
|
||||
(self.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION))
|
||||
raise NotImplementedError("Override this method")
|
||||
|
||||
def get_wrapped_value(self):
|
||||
"""
|
||||
Returns the debugger-specific type-object wrapped by this object. This
|
||||
is sometimes needed for doing things like pointer-arithmetic in GDB.
|
||||
"""
|
||||
raise NotImplementedError("Override this method")
|
||||
|
||||
def get_type_kind(self):
|
||||
"""This method returns the TYPE_KIND_* value for this type-object."""
|
||||
if self.__type_kind is None:
|
||||
dwarf_type_code = self.get_dwarf_type_kind()
|
||||
|
||||
if dwarf_type_code == DWARF_TYPE_CODE_STRUCT:
|
||||
self.__type_kind = self.__classify_struct()
|
||||
elif dwarf_type_code == DWARF_TYPE_CODE_UNION:
|
||||
self.__type_kind = self.__classify_union()
|
||||
elif dwarf_type_code == DWARF_TYPE_CODE_PTR:
|
||||
self.__type_kind = TYPE_KIND_PTR
|
||||
elif dwarf_type_code == DWARF_TYPE_CODE_ARRAY:
|
||||
self.__type_kind = TYPE_KIND_FIXED_SIZE_VEC
|
||||
else:
|
||||
self.__type_kind = TYPE_KIND_UNKNOWN
|
||||
return self.__type_kind
|
||||
|
||||
def __classify_struct(self):
|
||||
assert self.get_dwarf_type_kind() == DWARF_TYPE_CODE_STRUCT
|
||||
|
||||
unqualified_type_name = self.get_unqualified_type_name()
|
||||
|
||||
# STR SLICE
|
||||
if unqualified_type_name == "&str":
|
||||
return TYPE_KIND_STR_SLICE
|
||||
|
||||
# REGULAR SLICE
|
||||
if (unqualified_type_name.startswith(("&[", "&mut [")) and
|
||||
unqualified_type_name.endswith("]") and
|
||||
self.__conforms_to_field_layout(SLICE_FIELD_NAMES)):
|
||||
return TYPE_KIND_SLICE
|
||||
|
||||
fields = self.get_fields()
|
||||
field_count = len(fields)
|
||||
|
||||
# EMPTY STRUCT
|
||||
if field_count == 0:
|
||||
return TYPE_KIND_EMPTY
|
||||
|
||||
# STD VEC
|
||||
if (unqualified_type_name.startswith("Vec<") and
|
||||
self.__conforms_to_field_layout(STD_VEC_FIELD_NAMES)):
|
||||
return TYPE_KIND_STD_VEC
|
||||
|
||||
# STD COLLECTION VECDEQUE
|
||||
if (unqualified_type_name.startswith("VecDeque<") and
|
||||
self.__conforms_to_field_layout(STD_VECDEQUE_FIELD_NAMES)):
|
||||
return TYPE_KIND_STD_VECDEQUE
|
||||
|
||||
# STD COLLECTION BTREESET
|
||||
if (unqualified_type_name.startswith("BTreeSet<") and
|
||||
self.__conforms_to_field_layout(STD_BTREESET_FIELD_NAMES)):
|
||||
return TYPE_KIND_STD_BTREESET
|
||||
|
||||
# STD COLLECTION BTREEMAP
|
||||
if (unqualified_type_name.startswith("BTreeMap<") and
|
||||
self.__conforms_to_field_layout(STD_BTREEMAP_FIELD_NAMES)):
|
||||
return TYPE_KIND_STD_BTREEMAP
|
||||
|
||||
# STD STRING
|
||||
if (unqualified_type_name.startswith("String") and
|
||||
self.__conforms_to_field_layout(STD_STRING_FIELD_NAMES)):
|
||||
return TYPE_KIND_STD_STRING
|
||||
|
||||
# OS STRING
|
||||
if (unqualified_type_name == "OsString" and
|
||||
self.__conforms_to_field_layout(OS_STRING_FIELD_NAMES)):
|
||||
return TYPE_KIND_OS_STRING
|
||||
|
||||
# ENUM VARIANTS
|
||||
if fields[0].name == ENUM_DISR_FIELD_NAME:
|
||||
if field_count == 1:
|
||||
return TYPE_KIND_CSTYLE_VARIANT
|
||||
elif self.__all_fields_conform_to_tuple_field_naming(1):
|
||||
return TYPE_KIND_TUPLE_VARIANT
|
||||
else:
|
||||
return TYPE_KIND_STRUCT_VARIANT
|
||||
|
||||
# TUPLE
|
||||
if self.__all_fields_conform_to_tuple_field_naming(0):
|
||||
if unqualified_type_name.startswith("("):
|
||||
return TYPE_KIND_TUPLE
|
||||
else:
|
||||
return TYPE_KIND_TUPLE_STRUCT
|
||||
|
||||
# REGULAR STRUCT
|
||||
return TYPE_KIND_REGULAR_STRUCT
|
||||
|
||||
def __classify_union(self):
|
||||
assert self.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION
|
||||
|
||||
union_members = self.get_fields()
|
||||
union_member_count = len(union_members)
|
||||
if union_member_count == 0:
|
||||
return TYPE_KIND_EMPTY
|
||||
|
||||
first_variant_name = union_members[0].name
|
||||
if first_variant_name is None:
|
||||
if union_member_count == 1:
|
||||
return TYPE_KIND_SINGLETON_ENUM
|
||||
else:
|
||||
return TYPE_KIND_REGULAR_ENUM
|
||||
elif first_variant_name.startswith(ENCODED_ENUM_PREFIX):
|
||||
assert union_member_count == 1
|
||||
return TYPE_KIND_COMPRESSED_ENUM
|
||||
else:
|
||||
return TYPE_KIND_REGULAR_UNION
|
||||
|
||||
def __conforms_to_field_layout(self, expected_fields):
|
||||
actual_fields = self.get_fields()
|
||||
actual_field_count = len(actual_fields)
|
||||
|
||||
if actual_field_count != len(expected_fields):
|
||||
return False
|
||||
|
||||
for i in range(0, actual_field_count):
|
||||
if actual_fields[i].name != expected_fields[i]:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def __all_fields_conform_to_tuple_field_naming(self, start_index):
|
||||
fields = self.get_fields()
|
||||
field_count = len(fields)
|
||||
|
||||
for i in range(start_index, field_count):
|
||||
field_name = fields[i].name
|
||||
if (field_name is None) or (re.match(r"__\d+$", field_name) is None):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class Value(object):
|
||||
"""
|
||||
This class provides a common interface for value-oriented operations.
|
||||
Sub-classes are supposed to wrap a debugger-specific value-object and
|
||||
provide implementations for the abstract methods in this class.
|
||||
"""
|
||||
def __init__(self, ty):
|
||||
self.type = ty
|
||||
|
||||
def get_child_at_index(self, index):
|
||||
"""Returns the value of the field, array element or variant at the given index"""
|
||||
raise NotImplementedError("Override this method")
|
||||
|
||||
def as_integer(self):
|
||||
"""
|
||||
Try to convert the wrapped value into a Python integer. This should
|
||||
always succeed for values that are pointers or actual integers.
|
||||
"""
|
||||
raise NotImplementedError("Override this method")
|
||||
|
||||
def get_wrapped_value(self):
|
||||
"""
|
||||
Returns the debugger-specific value-object wrapped by this object. This
|
||||
is sometimes needed for doing things like pointer-arithmetic in GDB.
|
||||
"""
|
||||
raise NotImplementedError("Override this method")
|
||||
|
||||
|
||||
class EncodedEnumInfo(object):
|
||||
"""
|
||||
This class provides facilities for handling enum values with compressed
|
||||
encoding where a non-null field in one variant doubles as the discriminant.
|
||||
"""
|
||||
|
||||
def __init__(self, enum_val):
|
||||
assert enum_val.type.get_type_kind() == TYPE_KIND_COMPRESSED_ENUM
|
||||
variant_name = enum_val.type.get_fields()[0].name
|
||||
last_separator_index = variant_name.rfind("$")
|
||||
start_index = len(ENCODED_ENUM_PREFIX)
|
||||
indices_substring = variant_name[start_index:last_separator_index].split("$")
|
||||
self.__enum_val = enum_val
|
||||
self.__disr_field_indices = [int(index) for index in indices_substring]
|
||||
self.__null_variant_name = variant_name[last_separator_index + 1:]
|
||||
|
||||
def is_null_variant(self):
|
||||
ty = self.__enum_val.type
|
||||
sole_variant_val = self.__enum_val.get_child_at_index(0)
|
||||
discriminant_val = sole_variant_val
|
||||
for disr_field_index in self.__disr_field_indices:
|
||||
discriminant_val = discriminant_val.get_child_at_index(disr_field_index)
|
||||
|
||||
# If the discriminant field is a fat pointer we have to consider the
|
||||
# first word as the true discriminant
|
||||
if discriminant_val.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_STRUCT:
|
||||
discriminant_val = discriminant_val.get_child_at_index(0)
|
||||
|
||||
return discriminant_val.as_integer() == 0
|
||||
|
||||
def get_non_null_variant_val(self):
|
||||
return self.__enum_val.get_child_at_index(0)
|
||||
|
||||
def get_null_variant_name(self):
|
||||
return self.__null_variant_name
|
||||
|
||||
|
||||
def get_discriminant_value_as_integer(enum_val):
|
||||
assert enum_val.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION
|
||||
# we can take any variant here because the discriminant has to be the same
|
||||
# for all of them.
|
||||
variant_val = enum_val.get_child_at_index(0)
|
||||
disr_val = variant_val.get_child_at_index(0)
|
||||
return disr_val.as_integer()
|
||||
|
||||
|
||||
def extract_length_ptr_and_cap_from_std_vec(vec_val):
|
||||
assert vec_val.type.get_type_kind() == TYPE_KIND_STD_VEC
|
||||
length_field_index = STD_VEC_FIELD_NAMES.index(STD_VEC_FIELD_NAME_LENGTH)
|
||||
buf_field_index = STD_VEC_FIELD_NAMES.index(STD_VEC_FIELD_NAME_BUF)
|
||||
|
||||
length = vec_val.get_child_at_index(length_field_index).as_integer()
|
||||
buf = vec_val.get_child_at_index(buf_field_index)
|
||||
|
||||
vec_ptr_val = buf.get_child_at_index(0)
|
||||
capacity = buf.get_child_at_index(1).as_integer()
|
||||
data_ptr = vec_ptr_val.get_child_at_index(0)
|
||||
assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
|
||||
return (length, data_ptr, capacity)
|
||||
|
||||
|
||||
def extract_tail_head_ptr_and_cap_from_std_vecdeque(vec_val):
|
||||
assert vec_val.type.get_type_kind() == TYPE_KIND_STD_VECDEQUE
|
||||
tail_field_index = STD_VECDEQUE_FIELD_NAMES.index(STD_VECDEQUE_FIELD_NAME_TAIL)
|
||||
head_field_index = STD_VECDEQUE_FIELD_NAMES.index(STD_VECDEQUE_FIELD_NAME_HEAD)
|
||||
buf_field_index = STD_VECDEQUE_FIELD_NAMES.index(STD_VECDEQUE_FIELD_NAME_BUF)
|
||||
|
||||
tail = vec_val.get_child_at_index(tail_field_index).as_integer()
|
||||
head = vec_val.get_child_at_index(head_field_index).as_integer()
|
||||
buf = vec_val.get_child_at_index(buf_field_index)
|
||||
|
||||
vec_ptr_val = buf.get_child_at_index(0)
|
||||
capacity = buf.get_child_at_index(1).as_integer()
|
||||
data_ptr = vec_ptr_val.get_child_at_index(0)
|
||||
assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
|
||||
return (tail, head, data_ptr, capacity)
|
||||
|
||||
|
||||
def extract_length_and_ptr_from_slice(slice_val):
|
||||
assert (slice_val.type.get_type_kind() == TYPE_KIND_SLICE or
|
||||
slice_val.type.get_type_kind() == TYPE_KIND_STR_SLICE)
|
||||
|
||||
length_field_index = SLICE_FIELD_NAMES.index(SLICE_FIELD_NAME_LENGTH)
|
||||
ptr_field_index = SLICE_FIELD_NAMES.index(SLICE_FIELD_NAME_DATA_PTR)
|
||||
|
||||
length = slice_val.get_child_at_index(length_field_index).as_integer()
|
||||
data_ptr = slice_val.get_child_at_index(ptr_field_index)
|
||||
|
||||
assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
|
||||
return (length, data_ptr)
|
||||
|
||||
|
||||
UNQUALIFIED_TYPE_MARKERS = frozenset(["(", "[", "&", "*"])
|
||||
|
||||
|
||||
def extract_type_name(qualified_type_name):
|
||||
"""Extracts the type name from a fully qualified path"""
|
||||
if qualified_type_name[0] in UNQUALIFIED_TYPE_MARKERS:
|
||||
return qualified_type_name
|
||||
|
||||
end_of_search = qualified_type_name.find("<")
|
||||
if end_of_search < 0:
|
||||
end_of_search = len(qualified_type_name)
|
||||
|
||||
index = qualified_type_name.rfind("::", 0, end_of_search)
|
||||
if index < 0:
|
||||
return qualified_type_name
|
||||
else:
|
||||
return qualified_type_name[index + 2:]
|
||||
|
||||
|
||||
try:
|
||||
compat_str = unicode # Python 2
|
||||
except NameError:
|
||||
compat_str = str
|
@ -1,3 +1,3 @@
|
||||
import gdb
|
||||
import gdb_rust_pretty_printing
|
||||
gdb_rust_pretty_printing.register_printers(gdb.current_objfile())
|
||||
import gdb_lookup
|
||||
gdb_lookup.register_printers(gdb.current_objfile())
|
||||
|
92
src/etc/gdb_lookup.py
Normal file
92
src/etc/gdb_lookup.py
Normal file
@ -0,0 +1,92 @@
|
||||
import gdb
|
||||
import re
|
||||
|
||||
from gdb_providers import *
|
||||
from rust_types import *
|
||||
|
||||
|
||||
rust_enabled = 'set language rust' in gdb.execute('complete set language ru', to_string=True)
|
||||
_gdb_version_matched = re.search('([0-9]+)\\.([0-9]+)', gdb.VERSION)
|
||||
gdb_version = [int(num) for num in _gdb_version_matched.groups()] if _gdb_version_matched else []
|
||||
|
||||
def register_printers(objfile):
|
||||
objfile.pretty_printers.append(lookup)
|
||||
|
||||
|
||||
# BACKCOMPAT: rust 1.35
|
||||
def is_hashbrown_hashmap(hash_map):
|
||||
return len(hash_map.type.fields()) == 1
|
||||
|
||||
|
||||
def classify_rust_type(type):
|
||||
type_class = type.code
|
||||
if type_class == gdb.TYPE_CODE_STRUCT:
|
||||
return classify_struct(type.tag, type.fields())
|
||||
if type_class == gdb.TYPE_CODE_UNION:
|
||||
return classify_union(type.fields())
|
||||
|
||||
return RustType.OTHER
|
||||
|
||||
|
||||
def check_enum_discriminant(valobj):
|
||||
content = valobj[valobj.type.fields()[0]]
|
||||
fields = content.type.fields()
|
||||
if len(fields) > 1:
|
||||
discriminant = int(content[fields[0]]) + 1
|
||||
if discriminant > len(fields):
|
||||
# invalid discriminant
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def lookup(valobj):
|
||||
rust_type = classify_rust_type(valobj.type)
|
||||
|
||||
if rust_type == RustType.ENUM:
|
||||
# use enum provider only for GDB <7.12
|
||||
if gdb_version[0] < 7 or (gdb_version[0] == 7 and gdb_version[1] < 12):
|
||||
if check_enum_discriminant(valobj):
|
||||
return EnumProvider(valobj)
|
||||
|
||||
if rust_type == RustType.STD_STRING:
|
||||
return StdStringProvider(valobj)
|
||||
if rust_type == RustType.STD_OS_STRING:
|
||||
return StdOsStringProvider(valobj)
|
||||
if rust_type == RustType.STD_STR and not rust_enabled:
|
||||
return StdStrProvider(valobj)
|
||||
|
||||
if rust_type == RustType.STD_VEC:
|
||||
return StdVecProvider(valobj)
|
||||
if rust_type == RustType.STD_VEC_DEQUE:
|
||||
return StdVecDequeProvider(valobj)
|
||||
if rust_type == RustType.STD_BTREE_SET:
|
||||
return StdBTreeSetProvider(valobj)
|
||||
if rust_type == RustType.STD_BTREE_MAP:
|
||||
return StdBTreeMapProvider(valobj)
|
||||
if rust_type == RustType.STD_HASH_MAP:
|
||||
if is_hashbrown_hashmap(valobj):
|
||||
return StdHashMapProvider(valobj)
|
||||
else:
|
||||
return StdOldHashMapProvider(valobj)
|
||||
if rust_type == RustType.STD_HASH_SET:
|
||||
hash_map = valobj["map"]
|
||||
if is_hashbrown_hashmap(hash_map):
|
||||
return StdHashMapProvider(hash_map, show_values=False)
|
||||
else:
|
||||
return StdOldHashMapProvider(hash_map, show_values=False)
|
||||
|
||||
if rust_type == RustType.STD_RC:
|
||||
return StdRcProvider(valobj)
|
||||
if rust_type == RustType.STD_ARC:
|
||||
return StdRcProvider(valobj, is_atomic=True)
|
||||
|
||||
if rust_type == RustType.STD_CELL:
|
||||
return StdCellProvider(valobj)
|
||||
if rust_type == RustType.STD_REF:
|
||||
return StdRefProvider(valobj)
|
||||
if rust_type == RustType.STD_REF_MUT:
|
||||
return StdRefProvider(valobj)
|
||||
if rust_type == RustType.STD_REF_CELL:
|
||||
return StdRefCellProvider(valobj)
|
||||
|
||||
return None
|
385
src/etc/gdb_providers.py
Normal file
385
src/etc/gdb_providers.py
Normal file
@ -0,0 +1,385 @@
|
||||
from sys import version_info
|
||||
|
||||
import gdb
|
||||
from gdb import lookup_type
|
||||
|
||||
if version_info[0] >= 3:
|
||||
xrange = range
|
||||
|
||||
ZERO_FIELD = "__0"
|
||||
FIRST_FIELD = "__1"
|
||||
|
||||
|
||||
def unwrap_unique_or_non_null(unique_or_nonnull):
|
||||
# BACKCOMPAT: rust 1.32
|
||||
# https://github.com/rust-lang/rust/commit/7a0911528058e87d22ea305695f4047572c5e067
|
||||
ptr = unique_or_nonnull["pointer"]
|
||||
return ptr if ptr.type.code == gdb.TYPE_CODE_PTR else ptr[ZERO_FIELD]
|
||||
|
||||
|
||||
class EnumProvider:
|
||||
def __init__(self, valobj):
|
||||
content = valobj[valobj.type.fields()[0]]
|
||||
fields = content.type.fields()
|
||||
self.empty = len(fields) == 0
|
||||
if not self.empty:
|
||||
if len(fields) == 1:
|
||||
discriminant = 0
|
||||
else:
|
||||
discriminant = int(content[fields[0]]) + 1
|
||||
self.active_variant = content[fields[discriminant]]
|
||||
self.name = fields[discriminant].name
|
||||
self.full_name = "{}::{}".format(valobj.type.name, self.name)
|
||||
else:
|
||||
self.full_name = valobj.type.name
|
||||
|
||||
def to_string(self):
|
||||
return self.full_name
|
||||
|
||||
def children(self):
|
||||
if not self.empty:
|
||||
yield self.name, self.active_variant
|
||||
|
||||
|
||||
class StdStringProvider:
|
||||
def __init__(self, valobj):
|
||||
self.valobj = valobj
|
||||
vec = valobj["vec"]
|
||||
self.length = int(vec["len"])
|
||||
self.data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"])
|
||||
|
||||
def to_string(self):
|
||||
return self.data_ptr.lazy_string(encoding="utf-8", length=self.length)
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "string"
|
||||
|
||||
|
||||
class StdOsStringProvider:
|
||||
def __init__(self, valobj):
|
||||
self.valobj = valobj
|
||||
buf = self.valobj["inner"]["inner"]
|
||||
is_windows = "Wtf8Buf" in buf.type.name
|
||||
vec = buf[ZERO_FIELD] if is_windows else buf
|
||||
|
||||
self.length = int(vec["len"])
|
||||
self.data_ptr = unwrap_unique_or_non_null(vec["buf"]["ptr"])
|
||||
|
||||
def to_string(self):
|
||||
return self.data_ptr.lazy_string(encoding="utf-8", length=self.length)
|
||||
|
||||
def display_hint(self):
|
||||
return "string"
|
||||
|
||||
|
||||
class StdStrProvider:
|
||||
def __init__(self, valobj):
|
||||
self.valobj = valobj
|
||||
self.length = int(valobj["length"])
|
||||
self.data_ptr = valobj["data_ptr"]
|
||||
|
||||
def to_string(self):
|
||||
return self.data_ptr.lazy_string(encoding="utf-8", length=self.length)
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "string"
|
||||
|
||||
|
||||
class StdVecProvider:
|
||||
def __init__(self, valobj):
|
||||
self.valobj = valobj
|
||||
self.length = int(valobj["len"])
|
||||
self.data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"])
|
||||
|
||||
def to_string(self):
|
||||
return "Vec(size={})".format(self.length)
|
||||
|
||||
def children(self):
|
||||
saw_inaccessible = False
|
||||
for index in xrange(self.length):
|
||||
element_ptr = self.data_ptr + index
|
||||
if saw_inaccessible:
|
||||
return
|
||||
try:
|
||||
# rust-lang/rust#64343: passing deref expr to `str` allows
|
||||
# catching exception on garbage pointer
|
||||
str(element_ptr.dereference())
|
||||
yield "[{}]".format(index), element_ptr.dereference()
|
||||
except RuntimeError:
|
||||
saw_inaccessible = True
|
||||
yield str(index), "inaccessible"
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "array"
|
||||
|
||||
|
||||
class StdVecDequeProvider:
|
||||
def __init__(self, valobj):
|
||||
self.valobj = valobj
|
||||
self.head = int(valobj["head"])
|
||||
self.tail = int(valobj["tail"])
|
||||
self.cap = int(valobj["buf"]["cap"])
|
||||
self.data_ptr = unwrap_unique_or_non_null(valobj["buf"]["ptr"])
|
||||
if self.head >= self.tail:
|
||||
self.size = self.head - self.tail
|
||||
else:
|
||||
self.size = self.cap + self.head - self.tail
|
||||
|
||||
def to_string(self):
|
||||
return "VecDeque(size={})".format(self.size)
|
||||
|
||||
def children(self):
|
||||
for index in xrange(0, self.size):
|
||||
value = (self.data_ptr + ((self.tail + index) % self.cap)).dereference()
|
||||
yield "[{}]".format(index), value
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "array"
|
||||
|
||||
|
||||
class StdRcProvider:
|
||||
def __init__(self, valobj, is_atomic=False):
|
||||
self.valobj = valobj
|
||||
self.is_atomic = is_atomic
|
||||
self.ptr = unwrap_unique_or_non_null(valobj["ptr"])
|
||||
self.value = self.ptr["data" if is_atomic else "value"]
|
||||
self.strong = self.ptr["strong"]["v" if is_atomic else "value"]["value"]
|
||||
self.weak = self.ptr["weak"]["v" if is_atomic else "value"]["value"] - 1
|
||||
|
||||
def to_string(self):
|
||||
if self.is_atomic:
|
||||
return "Arc(strong={}, weak={})".format(int(self.strong), int(self.weak))
|
||||
else:
|
||||
return "Rc(strong={}, weak={})".format(int(self.strong), int(self.weak))
|
||||
|
||||
def children(self):
|
||||
yield "value", self.value
|
||||
yield "strong", self.strong
|
||||
yield "weak", self.weak
|
||||
|
||||
|
||||
class StdCellProvider:
|
||||
def __init__(self, valobj):
|
||||
self.value = valobj["value"]["value"]
|
||||
|
||||
def to_string(self):
|
||||
return "Cell"
|
||||
|
||||
def children(self):
|
||||
yield "value", self.value
|
||||
|
||||
|
||||
class StdRefProvider:
|
||||
def __init__(self, valobj):
|
||||
self.value = valobj["value"].dereference()
|
||||
self.borrow = valobj["borrow"]["borrow"]["value"]["value"]
|
||||
|
||||
def to_string(self):
|
||||
borrow = int(self.borrow)
|
||||
if borrow >= 0:
|
||||
return "Ref(borrow={})".format(borrow)
|
||||
else:
|
||||
return "Ref(borrow_mut={})".format(-borrow)
|
||||
|
||||
def children(self):
|
||||
yield "*value", self.value
|
||||
yield "borrow", self.borrow
|
||||
|
||||
|
||||
class StdRefCellProvider:
|
||||
def __init__(self, valobj):
|
||||
self.value = valobj["value"]["value"]
|
||||
self.borrow = valobj["borrow"]["value"]["value"]
|
||||
|
||||
def to_string(self):
|
||||
borrow = int(self.borrow)
|
||||
if borrow >= 0:
|
||||
return "RefCell(borrow={})".format(borrow)
|
||||
else:
|
||||
return "RefCell(borrow_mut={})".format(-borrow)
|
||||
|
||||
def children(self):
|
||||
yield "value", self.value
|
||||
yield "borrow", self.borrow
|
||||
|
||||
|
||||
# Yield each key (and optionally value) from a BoxedNode.
|
||||
def children_of_node(boxed_node, height, want_values):
|
||||
def cast_to_internal(node):
|
||||
internal_type_name = str(node.type.target()).replace("LeafNode", "InternalNode", 1)
|
||||
internal_type = lookup_type(internal_type_name)
|
||||
return node.cast(internal_type.pointer())
|
||||
|
||||
node_ptr = unwrap_unique_or_non_null(boxed_node["ptr"])
|
||||
node_ptr = cast_to_internal(node_ptr) if height > 0 else node_ptr
|
||||
leaf = node_ptr["data"] if height > 0 else node_ptr.dereference()
|
||||
keys = leaf["keys"]
|
||||
values = leaf["vals"]
|
||||
length = int(leaf["len"])
|
||||
|
||||
for i in xrange(0, length + 1):
|
||||
if height > 0:
|
||||
child_ptr = node_ptr["edges"][i]["value"]["value"]
|
||||
for child in children_of_node(child_ptr, height - 1, want_values):
|
||||
yield child
|
||||
if i < length:
|
||||
if want_values:
|
||||
yield keys[i]["value"]["value"], values[i]["value"]["value"]
|
||||
else:
|
||||
yield keys[i]["value"]["value"]
|
||||
|
||||
|
||||
class StdBTreeSetProvider:
|
||||
def __init__(self, valobj):
|
||||
self.valobj = valobj
|
||||
|
||||
def to_string(self):
|
||||
return "BTreeSet(size={})".format(self.valobj["map"]["length"])
|
||||
|
||||
def children(self):
|
||||
inner_map = self.valobj["map"]
|
||||
if inner_map["length"] > 0:
|
||||
root = inner_map["root"]
|
||||
if "core::option::Option<" in root.type.name:
|
||||
type_name = str(root.type.name).replace("core::option::Option<", "", 1)[:-1]
|
||||
root = root.cast(gdb.lookup_type(type_name))
|
||||
|
||||
node_ptr = root["node"]
|
||||
for i, child in enumerate(children_of_node(node_ptr, root["height"], False)):
|
||||
yield "[{}]".format(i), child
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "array"
|
||||
|
||||
|
||||
class StdBTreeMapProvider:
|
||||
def __init__(self, valobj):
|
||||
self.valobj = valobj
|
||||
|
||||
def to_string(self):
|
||||
return "BTreeMap(size={})".format(self.valobj["length"])
|
||||
|
||||
def children(self):
|
||||
if self.valobj["length"] > 0:
|
||||
root = self.valobj["root"]
|
||||
if "core::option::Option<" in root.type.name:
|
||||
type_name = str(root.type.name).replace("core::option::Option<", "", 1)[:-1]
|
||||
root = root.cast(gdb.lookup_type(type_name))
|
||||
|
||||
node_ptr = root["node"]
|
||||
for i, child in enumerate(children_of_node(node_ptr, root["height"], True)):
|
||||
yield "key{}".format(i), child[0]
|
||||
yield "val{}".format(i), child[1]
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "map"
|
||||
|
||||
|
||||
# BACKCOMPAT: rust 1.35
|
||||
class StdOldHashMapProvider:
|
||||
def __init__(self, valobj, show_values=True):
|
||||
self.valobj = valobj
|
||||
self.show_values = show_values
|
||||
|
||||
self.table = self.valobj["table"]
|
||||
self.size = int(self.table["size"])
|
||||
self.hashes = self.table["hashes"]
|
||||
self.hash_uint_type = self.hashes.type
|
||||
self.hash_uint_size = self.hashes.type.sizeof
|
||||
self.modulo = 2 ** self.hash_uint_size
|
||||
self.data_ptr = self.hashes[ZERO_FIELD]["pointer"]
|
||||
|
||||
self.capacity_mask = int(self.table["capacity_mask"])
|
||||
self.capacity = (self.capacity_mask + 1) % self.modulo
|
||||
|
||||
marker = self.table["marker"].type
|
||||
self.pair_type = marker.template_argument(0)
|
||||
self.pair_type_size = self.pair_type.sizeof
|
||||
|
||||
self.valid_indices = []
|
||||
for idx in range(self.capacity):
|
||||
data_ptr = self.data_ptr.cast(self.hash_uint_type.pointer())
|
||||
address = data_ptr + idx
|
||||
hash_uint = address.dereference()
|
||||
hash_ptr = hash_uint[ZERO_FIELD]["pointer"]
|
||||
if int(hash_ptr) != 0:
|
||||
self.valid_indices.append(idx)
|
||||
|
||||
def to_string(self):
|
||||
if self.show_values:
|
||||
return "HashMap(size={})".format(self.size)
|
||||
else:
|
||||
return "HashSet(size={})".format(self.size)
|
||||
|
||||
def children(self):
|
||||
start = int(self.data_ptr) & ~1
|
||||
|
||||
hashes = self.hash_uint_size * self.capacity
|
||||
align = self.pair_type_size
|
||||
len_rounded_up = (((((hashes + align) % self.modulo - 1) % self.modulo) & ~(
|
||||
(align - 1) % self.modulo)) % self.modulo - hashes) % self.modulo
|
||||
|
||||
pairs_offset = hashes + len_rounded_up
|
||||
pairs_start = gdb.Value(start + pairs_offset).cast(self.pair_type.pointer())
|
||||
|
||||
for index in range(self.size):
|
||||
table_index = self.valid_indices[index]
|
||||
idx = table_index & self.capacity_mask
|
||||
element = (pairs_start + idx).dereference()
|
||||
if self.show_values:
|
||||
yield "key{}".format(index), element[ZERO_FIELD]
|
||||
yield "val{}".format(index), element[FIRST_FIELD]
|
||||
else:
|
||||
yield "[{}]".format(index), element[ZERO_FIELD]
|
||||
|
||||
def display_hint(self):
|
||||
return "map" if self.show_values else "array"
|
||||
|
||||
|
||||
class StdHashMapProvider:
|
||||
def __init__(self, valobj, show_values=True):
|
||||
self.valobj = valobj
|
||||
self.show_values = show_values
|
||||
|
||||
table = self.valobj["base"]["table"]
|
||||
capacity = int(table["bucket_mask"]) + 1
|
||||
ctrl = table["ctrl"]["pointer"]
|
||||
|
||||
self.size = int(table["items"])
|
||||
self.data_ptr = table["data"]["pointer"]
|
||||
self.pair_type = self.data_ptr.dereference().type
|
||||
|
||||
self.valid_indices = []
|
||||
for idx in range(capacity):
|
||||
address = ctrl + idx
|
||||
value = address.dereference()
|
||||
is_presented = value & 128 == 0
|
||||
if is_presented:
|
||||
self.valid_indices.append(idx)
|
||||
|
||||
def to_string(self):
|
||||
if self.show_values:
|
||||
return "HashMap(size={})".format(self.size)
|
||||
else:
|
||||
return "HashSet(size={})".format(self.size)
|
||||
|
||||
def children(self):
|
||||
pairs_start = self.data_ptr
|
||||
|
||||
for index in range(self.size):
|
||||
idx = self.valid_indices[index]
|
||||
element = (pairs_start + idx).dereference()
|
||||
if self.show_values:
|
||||
yield "key{}".format(index), element[ZERO_FIELD]
|
||||
yield "val{}".format(index), element[FIRST_FIELD]
|
||||
else:
|
||||
yield "[{}]".format(index), element[ZERO_FIELD]
|
||||
|
||||
def display_hint(self):
|
||||
return "map" if self.show_values else "array"
|
@ -1,466 +0,0 @@
|
||||
import gdb
|
||||
import re
|
||||
import sys
|
||||
import debugger_pretty_printers_common as rustpp
|
||||
|
||||
# We want a version of `range` which doesn't allocate an intermediate list,
|
||||
# specifically it should use a lazy iterator. In Python 2 this was `xrange`, but
|
||||
# if we're running with Python 3 then we need to use `range` instead.
|
||||
if sys.version_info[0] >= 3:
|
||||
xrange = range
|
||||
|
||||
rust_enabled = 'set language rust' in gdb.execute('complete set language ru', to_string=True)
|
||||
|
||||
# The btree pretty-printers fail in a confusing way unless
|
||||
# https://sourceware.org/bugzilla/show_bug.cgi?id=21763 is fixed.
|
||||
# This fix went in 8.1, so check for that.
|
||||
# See https://github.com/rust-lang/rust/issues/56730
|
||||
gdb_81 = False
|
||||
_match = re.search('([0-9]+)\\.([0-9]+)', gdb.VERSION)
|
||||
if _match:
|
||||
if int(_match.group(1)) > 8 or (int(_match.group(1)) == 8 and int(_match.group(2)) >= 1):
|
||||
gdb_81 = True
|
||||
|
||||
# ===============================================================================
|
||||
# GDB Pretty Printing Module for Rust
|
||||
# ===============================================================================
|
||||
|
||||
|
||||
class GdbType(rustpp.Type):
|
||||
|
||||
def __init__(self, ty):
|
||||
super(GdbType, self).__init__()
|
||||
self.ty = ty
|
||||
self.fields = None
|
||||
|
||||
def get_unqualified_type_name(self):
|
||||
tag = self.ty.tag
|
||||
|
||||
if tag is None:
|
||||
return tag
|
||||
|
||||
return rustpp.extract_type_name(tag).replace("&'static ", "&")
|
||||
|
||||
def get_dwarf_type_kind(self):
|
||||
if self.ty.code == gdb.TYPE_CODE_STRUCT:
|
||||
return rustpp.DWARF_TYPE_CODE_STRUCT
|
||||
|
||||
if self.ty.code == gdb.TYPE_CODE_UNION:
|
||||
return rustpp.DWARF_TYPE_CODE_UNION
|
||||
|
||||
if self.ty.code == gdb.TYPE_CODE_PTR:
|
||||
return rustpp.DWARF_TYPE_CODE_PTR
|
||||
|
||||
if self.ty.code == gdb.TYPE_CODE_ENUM:
|
||||
return rustpp.DWARF_TYPE_CODE_ENUM
|
||||
|
||||
def get_fields(self):
|
||||
assert ((self.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_STRUCT) or
|
||||
(self.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_UNION))
|
||||
if self.fields is None:
|
||||
self.fields = list(self.ty.fields())
|
||||
return self.fields
|
||||
|
||||
def get_wrapped_value(self):
|
||||
return self.ty
|
||||
|
||||
|
||||
class GdbValue(rustpp.Value):
|
||||
def __init__(self, gdb_val):
|
||||
super(GdbValue, self).__init__(GdbType(gdb_val.type))
|
||||
self.gdb_val = gdb_val
|
||||
self.children = {}
|
||||
|
||||
def get_child_at_index(self, index):
|
||||
child = self.children.get(index)
|
||||
if child is None:
|
||||
gdb_field = get_field_at_index(self.gdb_val, index)
|
||||
child = GdbValue(self.gdb_val[gdb_field])
|
||||
self.children[index] = child
|
||||
return child
|
||||
|
||||
def as_integer(self):
|
||||
if self.gdb_val.type.code == gdb.TYPE_CODE_PTR:
|
||||
as_str = rustpp.compat_str(self.gdb_val).split()[0]
|
||||
return int(as_str, 0)
|
||||
return int(self.gdb_val)
|
||||
|
||||
def get_wrapped_value(self):
|
||||
return self.gdb_val
|
||||
|
||||
|
||||
def register_printers(objfile):
|
||||
"""Registers Rust pretty printers for the given objfile"""
|
||||
objfile.pretty_printers.append(rust_pretty_printer_lookup_function)
|
||||
|
||||
|
||||
def rust_pretty_printer_lookup_function(gdb_val):
|
||||
"""
|
||||
Returns the correct Rust pretty printer for the given value
|
||||
if there is one
|
||||
"""
|
||||
|
||||
val = GdbValue(gdb_val)
|
||||
type_kind = val.type.get_type_kind()
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_SLICE:
|
||||
return RustSlicePrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STD_VEC:
|
||||
return RustStdVecPrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STD_VECDEQUE:
|
||||
return RustStdVecDequePrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STD_BTREESET and gdb_81:
|
||||
return RustStdBTreeSetPrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STD_BTREEMAP and gdb_81:
|
||||
return RustStdBTreeMapPrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STD_STRING:
|
||||
return RustStdStringPrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_OS_STRING:
|
||||
return RustOsStringPrinter(val)
|
||||
|
||||
# Checks after this point should only be for "compiler" types --
|
||||
# things that gdb's Rust language support knows about.
|
||||
if rust_enabled:
|
||||
return None
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_EMPTY:
|
||||
return RustEmptyPrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_REGULAR_STRUCT:
|
||||
return RustStructPrinter(val,
|
||||
omit_first_field=False,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=False)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STRUCT_VARIANT:
|
||||
return RustStructPrinter(val,
|
||||
omit_first_field=True,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=False)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STR_SLICE:
|
||||
return RustStringSlicePrinter(val)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_TUPLE:
|
||||
return RustStructPrinter(val,
|
||||
omit_first_field=False,
|
||||
omit_type_name=True,
|
||||
is_tuple_like=True)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_TUPLE_STRUCT:
|
||||
return RustStructPrinter(val,
|
||||
omit_first_field=False,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=True)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_CSTYLE_VARIANT:
|
||||
return RustCStyleVariantPrinter(val.get_child_at_index(0))
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_TUPLE_VARIANT:
|
||||
return RustStructPrinter(val,
|
||||
omit_first_field=True,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=True)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_SINGLETON_ENUM:
|
||||
variant = get_field_at_index(gdb_val, 0)
|
||||
return rust_pretty_printer_lookup_function(gdb_val[variant])
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_REGULAR_ENUM:
|
||||
# This is a regular enum, extract the discriminant
|
||||
discriminant_val = rustpp.get_discriminant_value_as_integer(val)
|
||||
variant = get_field_at_index(gdb_val, discriminant_val)
|
||||
return rust_pretty_printer_lookup_function(gdb_val[variant])
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_COMPRESSED_ENUM:
|
||||
encoded_enum_info = rustpp.EncodedEnumInfo(val)
|
||||
if encoded_enum_info.is_null_variant():
|
||||
return IdentityPrinter(encoded_enum_info.get_null_variant_name())
|
||||
|
||||
non_null_val = encoded_enum_info.get_non_null_variant_val()
|
||||
return rust_pretty_printer_lookup_function(non_null_val.get_wrapped_value())
|
||||
|
||||
# No pretty printer has been found
|
||||
return None
|
||||
|
||||
|
||||
# =------------------------------------------------------------------------------
|
||||
# Pretty Printer Classes
|
||||
# =------------------------------------------------------------------------------
|
||||
class RustEmptyPrinter(object):
|
||||
def __init__(self, val):
|
||||
self.__val = val
|
||||
|
||||
def to_string(self):
|
||||
return self.__val.type.get_unqualified_type_name()
|
||||
|
||||
|
||||
class RustStructPrinter(object):
|
||||
def __init__(self, val, omit_first_field, omit_type_name, is_tuple_like):
|
||||
self.__val = val
|
||||
self.__omit_first_field = omit_first_field
|
||||
self.__omit_type_name = omit_type_name
|
||||
self.__is_tuple_like = is_tuple_like
|
||||
|
||||
def to_string(self):
|
||||
if self.__omit_type_name:
|
||||
return None
|
||||
return self.__val.type.get_unqualified_type_name()
|
||||
|
||||
def children(self):
|
||||
cs = []
|
||||
wrapped_value = self.__val.get_wrapped_value()
|
||||
|
||||
for number, field in enumerate(self.__val.type.get_fields()):
|
||||
field_value = wrapped_value[field.name]
|
||||
if self.__is_tuple_like:
|
||||
cs.append((str(number), field_value))
|
||||
else:
|
||||
cs.append((field.name, field_value))
|
||||
|
||||
if self.__omit_first_field:
|
||||
cs = cs[1:]
|
||||
|
||||
return cs
|
||||
|
||||
def display_hint(self):
|
||||
if self.__is_tuple_like:
|
||||
return "array"
|
||||
else:
|
||||
return ""
|
||||
|
||||
|
||||
class RustSlicePrinter(object):
|
||||
def __init__(self, val):
|
||||
self.__val = val
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "array"
|
||||
|
||||
def to_string(self):
|
||||
(length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(self.__val)
|
||||
return (self.__val.type.get_unqualified_type_name() +
|
||||
("(len: %i)" % length))
|
||||
|
||||
def children(self):
|
||||
(length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(self.__val)
|
||||
assert data_ptr.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
|
||||
raw_ptr = data_ptr.get_wrapped_value()
|
||||
|
||||
for index in xrange(0, length):
|
||||
yield (str(index), (raw_ptr + index).dereference())
|
||||
|
||||
|
||||
class RustStringSlicePrinter(object):
|
||||
def __init__(self, val):
|
||||
self.__val = val
|
||||
|
||||
def to_string(self):
|
||||
(length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(self.__val)
|
||||
raw_ptr = data_ptr.get_wrapped_value()
|
||||
return raw_ptr.lazy_string(encoding="utf-8", length=length)
|
||||
|
||||
def display_hint(self):
|
||||
return "string"
|
||||
|
||||
|
||||
class RustStdVecPrinter(object):
|
||||
def __init__(self, val):
|
||||
self.__val = val
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "array"
|
||||
|
||||
def to_string(self):
|
||||
(length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(self.__val)
|
||||
return (self.__val.type.get_unqualified_type_name() +
|
||||
("(len: %i, cap: %i)" % (length, cap)))
|
||||
|
||||
def children(self):
|
||||
saw_inaccessible = False
|
||||
(length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(self.__val)
|
||||
gdb_ptr = data_ptr.get_wrapped_value()
|
||||
for index in xrange(0, length):
|
||||
if saw_inaccessible:
|
||||
return
|
||||
try:
|
||||
# rust-lang/rust#64343: passing deref expr to `str` allows
|
||||
# catching exception on garbage pointer
|
||||
str((gdb_ptr + index).dereference())
|
||||
yield (str(index), (gdb_ptr + index).dereference())
|
||||
except RuntimeError:
|
||||
saw_inaccessible = True
|
||||
yield (str(index), "inaccessible")
|
||||
|
||||
|
||||
class RustStdVecDequePrinter(object):
|
||||
def __init__(self, val):
|
||||
self.__val = val
|
||||
|
||||
@staticmethod
|
||||
def display_hint():
|
||||
return "array"
|
||||
|
||||
def to_string(self):
|
||||
(tail, head, data_ptr, cap) = \
|
||||
rustpp.extract_tail_head_ptr_and_cap_from_std_vecdeque(self.__val)
|
||||
if head >= tail:
|
||||
size = head - tail
|
||||
else:
|
||||
size = cap + head - tail
|
||||
return (self.__val.type.get_unqualified_type_name() +
|
||||
("(len: %i, cap: %i)" % (size, cap)))
|
||||
|
||||
def children(self):
|
||||
(tail, head, data_ptr, cap) = \
|
||||
rustpp.extract_tail_head_ptr_and_cap_from_std_vecdeque(self.__val)
|
||||
gdb_ptr = data_ptr.get_wrapped_value()
|
||||
if head >= tail:
|
||||
size = head - tail
|
||||
else:
|
||||
size = cap + head - tail
|
||||
for index in xrange(0, size):
|
||||
yield (str(index), (gdb_ptr + ((tail + index) % cap)).dereference())
|
||||
|
||||
|
||||
# Yield each key (and optionally value) from a BoxedNode.
def children_of_node(boxed_node, height, want_values):
    # In-order traversal of a B-Tree node. `height` is the node's distance
    # from the leaf level: internal nodes (height > 0) interleave their
    # `length + 1` child edges with their `length` keys.
    node_ptr = boxed_node['ptr']['pointer']
    if height > 0:
        # Internal nodes share the LeafNode prefix plus an `edges` array;
        # recover the InternalNode type by textual substitution on the
        # pointee type's name, then re-cast the pointer.
        type_name = str(node_ptr.type.target()).replace('LeafNode', 'InternalNode', 1)
        node_type = gdb.lookup_type(type_name)
        node_ptr = node_ptr.cast(node_type.pointer())
        leaf = node_ptr['data']
    else:
        leaf = node_ptr.dereference()
    keys = leaf['keys']
    if want_values:
        values = leaf['vals']
    length = int(leaf['len'])
    for i in xrange(0, length + 1):
        if height > 0:
            # Descend into edge i before emitting key i (in-order).
            child_ptr = node_ptr['edges'][i]['value']['value']
            for child in children_of_node(child_ptr, height - 1, want_values):
                yield child
        if i < length:
            # keys/vals entries are unwrapped via ['value']['value'] —
            # presumably MaybeUninit wrappers; confirm against liballoc.
            if want_values:
                yield (keys[i]['value']['value'], values[i]['value']['value'])
            else:
                yield keys[i]['value']['value']
class RustStdBTreeSetPrinter(object):
    """gdb pretty-printer for alloc::collections::btree_set::BTreeSet<T>."""

    def __init__(self, val):
        self.__val = val

    @staticmethod
    def display_hint():
        # Render children as a sequence.
        return "array"

    def to_string(self):
        # BTreeSet is a thin wrapper around a BTreeMap stored in field 'map'.
        return (self.__val.type.get_unqualified_type_name() +
                ("(len: %i)" % self.__val.get_wrapped_value()['map']['length']))

    def children(self):
        prev_idx = None  # NOTE(review): assigned but never read; kept as-is.
        innermap = GdbValue(self.__val.get_wrapped_value()['map'])
        if innermap.get_wrapped_value()['length'] > 0:
            # 'root' is an Option<Root>: strip the textual
            # 'core::option::Option<...>' wrapper and cast to the inner type.
            root = GdbValue(innermap.get_wrapped_value()['root'])
            type_name = str(root.type.ty.name).replace('core::option::Option<', '', 1)[:-1]
            root = root.get_wrapped_value().cast(gdb.lookup_type(type_name))
            node_ptr = root['node']
            i = 0
            for child in children_of_node(node_ptr, root['height'], False):
                yield (str(i), child)
                i = i + 1
class RustStdBTreeMapPrinter(object):
    """gdb pretty-printer for alloc::collections::btree_map::BTreeMap<K, V>."""

    def __init__(self, val):
        self.__val = val

    @staticmethod
    def display_hint():
        # "map" makes gdb pair up consecutive children as key/value.
        return "map"

    def to_string(self):
        return (self.__val.type.get_unqualified_type_name() +
                ("(len: %i)" % self.__val.get_wrapped_value()['length']))

    def children(self):
        if self.__val.get_wrapped_value()['length'] > 0:
            # 'root' is an Option<Root>: strip the textual Option wrapper
            # from the type name, cast, and walk from the root node.
            root = GdbValue(self.__val.get_wrapped_value()['root'])
            type_name = str(root.type.ty.name).replace('core::option::Option<', '', 1)[:-1]
            root = root.get_wrapped_value().cast(gdb.lookup_type(type_name))
            node_ptr = root['node']
            i = 0
            for child in children_of_node(node_ptr, root['height'], True):
                # With display_hint "map", gdb consumes children pairwise:
                # key first, then its value.
                yield (str(i), child[0])
                yield (str(i), child[1])
                i = i + 1
class RustStdStringPrinter(object):
    """gdb pretty-printer for alloc::string::String."""

    def __init__(self, val):
        self.__val = val

    def to_string(self):
        # String's field 0 is its Vec<u8>; decode that buffer lazily as
        # UTF-8 so huge strings aren't copied eagerly.
        vec = self.__val.get_child_at_index(0)
        (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(vec)
        return data_ptr.get_wrapped_value().lazy_string(encoding="utf-8",
                                                        length=length)

    def display_hint(self):
        return "string"
class RustOsStringPrinter(object):
    """gdb pretty-printer for std::ffi::OsString."""

    def __init__(self, val):
        self.__val = val

    def to_string(self):
        # OsString wraps a platform buffer: a "Wtf8Buf" (itself wrapping a
        # Vec<u8>) on Windows, a Vec<u8> directly elsewhere.
        buf = self.__val.get_child_at_index(0)
        vec = buf.get_child_at_index(0)
        if vec.type.get_unqualified_type_name() == "Wtf8Buf":
            vec = vec.get_child_at_index(0)

        (length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(
            vec)
        # No encoding is passed: the bytes need not be valid UTF-8.
        return data_ptr.get_wrapped_value().lazy_string(length=length)

    def display_hint(self):
        return "string"
class RustCStyleVariantPrinter(object):
    """gdb pretty-printer for C-style (fieldless) enum values."""

    def __init__(self, val):
        # Only DWARF enum-typed values make sense here.
        assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_ENUM
        self.__val = val

    def to_string(self):
        # gdb already maps the discriminant to its enumerator name;
        # stringifying the wrapped value is enough.
        return str(self.__val.get_wrapped_value())
class IdentityPrinter(object):
    """Trivial printer that echoes back a pre-rendered string."""

    def __init__(self, string):
        # The text to display, already fully formatted by the caller.
        self.string = string

    def to_string(self):
        """Return the stored text unchanged."""
        return self.string
def get_field_at_index(gdb_val, index):
    """Return the `index`-th field of `gdb_val`'s type, or None if out of range.

    Fields are counted in the order reported by gdb's `type.fields()`;
    a negative or too-large `index` yields None.
    """
    # Idiom fix: enumerate() replaces the hand-rolled counter of the
    # original; behavior is unchanged.
    for i, field in enumerate(gdb_val.type.fields()):
        if i == index:
            return field
    return None
19
src/etc/lldb_commands
Normal file
19
src/etc/lldb_commands
Normal file
@ -0,0 +1,19 @@
|
||||
command script import \"$RUSTC_SYSROOT/lib/rustlib/etc/lldb_lookup.py\"
|
||||
type synthetic add -l lldb_lookup.synthetic_lookup -x \".*\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)String$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^&str$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^&\\[.+\\]$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(std::ffi::([a-z_]+::)+)OsString$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)Vec<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)VecDeque<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)BTreeSet<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)BTreeMap<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(std::collections::([a-z_]+::)+)HashMap<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(std::collections::([a-z_]+::)+)HashSet<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)Rc<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(alloc::([a-z_]+::)+)Arc<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(core::([a-z_]+::)+)Cell<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(core::([a-z_]+::)+)Ref<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(core::([a-z_]+::)+)RefMut<.+>$\" --category Rust
|
||||
type summary add -F lldb_lookup.summary_lookup -e -x -h \"^(core::([a-z_]+::)+)RefCell<.+>$\" --category Rust
|
||||
type category enable Rust
|
115
src/etc/lldb_lookup.py
Normal file
115
src/etc/lldb_lookup.py
Normal file
@ -0,0 +1,115 @@
|
||||
import lldb
|
||||
|
||||
from lldb_providers import *
|
||||
from rust_types import RustType, classify_struct, classify_union
|
||||
|
||||
|
||||
# BACKCOMPAT: rust 1.35
def is_hashbrown_hashmap(hash_map):
    """Heuristic layout check: the hashbrown-backed std HashMap (rust >= 1.36)
    has exactly one field (`base`), while the old std HashMap has several."""
    field_count = len(hash_map.type.fields)
    return field_count == 1
def classify_rust_type(type):
    """Map an lldb SBType onto the RustType category used by the lookup tables.

    Only structs and unions get a real classification; anything else is OTHER.
    """
    type_class = type.GetTypeClass()
    if type_class == lldb.eTypeClassStruct:
        return classify_struct(type.name, type.fields)
    elif type_class == lldb.eTypeClassUnion:
        return classify_union(type.fields)
    else:
        return RustType.OTHER
def summary_lookup(valobj, dict):
    # type: (SBValue, dict) -> str
    """Returns the summary provider for the given value"""
    rust_type = classify_rust_type(valobj.GetType())

    # String-like types each have a dedicated quoting provider.
    if rust_type == RustType.STD_STRING:
        return StdStringSummaryProvider(valobj, dict)
    if rust_type == RustType.STD_OS_STRING:
        return StdOsStringSummaryProvider(valobj, dict)
    if rust_type == RustType.STD_STR:
        return StdStrSummaryProvider(valobj, dict)

    # All sized containers share the same "size=N" summary.
    if rust_type in (RustType.STD_VEC, RustType.STD_VEC_DEQUE, RustType.STD_SLICE,
                     RustType.STD_HASH_MAP, RustType.STD_HASH_SET):
        return SizeSummaryProvider(valobj, dict)

    # Rc and Arc both show their reference counts.
    if rust_type in (RustType.STD_RC, RustType.STD_ARC):
        return StdRcSummaryProvider(valobj, dict)

    # Ref, RefMut and RefCell all defer to the borrowed value's summary.
    if rust_type in (RustType.STD_REF, RustType.STD_REF_MUT, RustType.STD_REF_CELL):
        return StdRefSummaryProvider(valobj, dict)

    return ""
def synthetic_lookup(valobj, dict):
    # type: (SBValue, dict) -> object
    """Returns the synthetic provider for the given value"""
    rust_type = classify_rust_type(valobj.GetType())

    if rust_type == RustType.STRUCT:
        return StructSyntheticProvider(valobj, dict)
    if rust_type == RustType.STRUCT_VARIANT:
        return StructSyntheticProvider(valobj, dict, is_variant=True)
    if rust_type == RustType.TUPLE:
        return TupleSyntheticProvider(valobj, dict)
    if rust_type == RustType.TUPLE_VARIANT:
        return TupleSyntheticProvider(valobj, dict, is_variant=True)
    if rust_type == RustType.EMPTY:
        return EmptySyntheticProvider(valobj, dict)
    if rust_type == RustType.REGULAR_ENUM:
        # Multi-variant enum: read the discriminant from the first field of
        # child 0, then recurse on the active variant's payload.
        discriminant = valobj.GetChildAtIndex(0).GetChildAtIndex(0).GetValueAsUnsigned()
        return synthetic_lookup(valobj.GetChildAtIndex(discriminant), dict)
    if rust_type == RustType.SINGLETON_ENUM:
        # Single-variant enum: unwrap straight into the only variant.
        return synthetic_lookup(valobj.GetChildAtIndex(0), dict)

    if rust_type == RustType.STD_VEC:
        return StdVecSyntheticProvider(valobj, dict)
    if rust_type == RustType.STD_VEC_DEQUE:
        return StdVecDequeSyntheticProvider(valobj, dict)
    if rust_type == RustType.STD_SLICE:
        return StdSliceSyntheticProvider(valobj, dict)

    if rust_type == RustType.STD_HASH_MAP:
        # Pre-1.36 std HashMap has a different layout; pick the matching
        # provider (see is_hashbrown_hashmap).
        if is_hashbrown_hashmap(valobj):
            return StdHashMapSyntheticProvider(valobj, dict)
        else:
            return StdOldHashMapSyntheticProvider(valobj, dict)
    if rust_type == RustType.STD_HASH_SET:
        # HashSet wraps a HashMap (child 0); show only the keys.
        hash_map = valobj.GetChildAtIndex(0)
        if is_hashbrown_hashmap(hash_map):
            return StdHashMapSyntheticProvider(hash_map, dict, show_values=False)
        else:
            return StdOldHashMapSyntheticProvider(hash_map, dict, show_values=False)

    if rust_type == RustType.STD_RC:
        return StdRcSyntheticProvider(valobj, dict)
    if rust_type == RustType.STD_ARC:
        return StdRcSyntheticProvider(valobj, dict, is_atomic=True)

    if rust_type == RustType.STD_CELL:
        return StdCellSyntheticProvider(valobj, dict)
    if rust_type == RustType.STD_REF:
        return StdRefSyntheticProvider(valobj, dict)
    if rust_type == RustType.STD_REF_MUT:
        return StdRefSyntheticProvider(valobj, dict)
    if rust_type == RustType.STD_REF_CELL:
        return StdRefSyntheticProvider(valobj, dict, is_cell=True)

    # Fallback: let lldb's own children through unmodified.
    return DefaultSynthteticProvider(valobj, dict)
715
src/etc/lldb_providers.py
Normal file
715
src/etc/lldb_providers.py
Normal file
@ -0,0 +1,715 @@
|
||||
import sys
|
||||
|
||||
from lldb import SBValue, SBData, SBError, eBasicTypeLong, eBasicTypeUnsignedLong, \
|
||||
eBasicTypeUnsignedChar
|
||||
|
||||
# from lldb.formatters import Logger
|
||||
|
||||
####################################################################################################
|
||||
# This file contains two kinds of pretty-printers: summary and synthetic.
|
||||
#
|
||||
# Important classes from LLDB module:
|
||||
# SBValue: the value of a variable, a register, or an expression
|
||||
# SBType: the data type; each SBValue has a corresponding SBType
|
||||
#
|
||||
# Summary provider is a function with the type `(SBValue, dict) -> str`.
|
||||
# The first parameter is the object encapsulating the actual variable being displayed;
|
||||
# The second parameter is an internal support parameter used by LLDB, and you should not touch it.
|
||||
#
|
||||
# Synthetic children is the way to provide a children-based representation of the object's value.
|
||||
# Synthetic provider is a class that implements the following interface:
|
||||
#
|
||||
# class SyntheticChildrenProvider:
|
||||
# def __init__(self, SBValue, dict)
|
||||
# def num_children(self)
|
||||
# def get_child_index(self, str)
|
||||
# def get_child_at_index(self, int)
|
||||
# def update(self)
|
||||
# def has_children(self)
|
||||
# def get_value(self)
|
||||
#
|
||||
#
|
||||
# You can find more information and examples here:
|
||||
# 1. https://lldb.llvm.org/varformats.html
|
||||
# 2. https://lldb.llvm.org/python-reference.html
|
||||
# 3. https://lldb.llvm.org/python_reference/lldb.formatters.cpp.libcxx-pysrc.html
|
||||
# 4. https://github.com/llvm-mirror/lldb/tree/master/examples/summaries/cocoa
|
||||
####################################################################################################
|
||||
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
|
||||
class ValueBuilder:
    """Helper for materializing synthetic integer SBValues (e.g. Rc's
    strong/weak counts) encoded in the target process's byte order."""

    def __init__(self, valobj):
        # type: (SBValue) -> ValueBuilder
        self.valobj = valobj
        process = valobj.GetProcess()
        # Cache the target properties needed to encode raw integer data.
        self.endianness = process.GetByteOrder()
        self.pointer_size = process.GetAddressByteSize()

    def from_int(self, name, value):
        # type: (str, int) -> SBValue
        # Encode `value` as a signed long and wrap it as a named SBValue.
        type = self.valobj.GetType().GetBasicType(eBasicTypeLong)
        data = SBData.CreateDataFromSInt64Array(self.endianness, self.pointer_size, [value])
        return self.valobj.CreateValueFromData(name, data, type)

    def from_uint(self, name, value):
        # type: (str, int) -> SBValue
        # Same as from_int, but for unsigned values.
        type = self.valobj.GetType().GetBasicType(eBasicTypeUnsignedLong)
        data = SBData.CreateDataFromUInt64Array(self.endianness, self.pointer_size, [value])
        return self.valobj.CreateValueFromData(name, data, type)
||||
def unwrap_unique_or_non_null(unique_or_nonnull):
    """Extract the raw pointer from a Unique<T>/NonNull<T> wrapper.

    BACKCOMPAT: rust 1.32
    https://github.com/rust-lang/rust/commit/7a0911528058e87d22ea305695f4047572c5e067
    On older rustcs `pointer` is a NonZero one-field struct rather than a
    raw pointer, so one more level must be unwrapped.
    """
    ptr = unique_or_nonnull.GetChildMemberWithName("pointer")
    if ptr.TypeIsPointerType():
        return ptr
    return ptr.GetChildAtIndex(0)
class DefaultSynthteticProvider:
    # NOTE(review): class name misspells "Synthetic"; kept as-is because
    # lldb_lookup.synthetic_lookup references it by this exact name.
    """Fallback synthetic provider: passes lldb's own children through."""

    def __init__(self, valobj, dict):
        # type: (SBValue, dict) -> DefaultSynthteticProvider
        # logger = Logger.Logger()
        # logger >> "Default synthetic provider for " + str(valobj.GetName())
        self.valobj = valobj

    def num_children(self):
        # type: () -> int
        return self.valobj.GetNumChildren()

    def get_child_index(self, name):
        # type: (str) -> int
        return self.valobj.GetIndexOfChildWithName(name)

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        return self.valobj.GetChildAtIndex(index)

    def update(self):
        # type: () -> None
        # Nothing cached, so nothing to refresh.
        pass

    def has_children(self):
        # type: () -> bool
        return self.valobj.MightHaveChildren()
class EmptySyntheticProvider:
    """Synthetic provider for values that expose no children at all."""

    def __init__(self, valobj, dict):
        # type: (SBValue, dict) -> EmptySyntheticProvider
        # logger = Logger.Logger()
        # logger >> "[EmptySyntheticProvider] for " + str(valobj.GetName())
        self.valobj = valobj

    def num_children(self):
        # type: () -> int
        return 0

    def get_child_index(self, name):
        # type: (str) -> int
        # No children: every lookup misses.
        return None

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        return None

    def update(self):
        # type: () -> None
        pass

    def has_children(self):
        # type: () -> bool
        return False
def SizeSummaryProvider(valobj, dict):
    # type: (SBValue, dict) -> str
    """Summary of the form "size=N" for container-like values; N is the
    child count reported by the value's synthetic provider."""
    count = valobj.GetNumChildren()
    return 'size=%s' % count
def vec_to_string(vec):
    """Decode a Vec<u8>-like SBValue's children into a Python string.

    On Python 3, invalid UTF-8 bytes are replaced; on Python 2, bytes are
    mapped straight to chars.
    """
    chars = [vec.GetChildAtIndex(i).GetValueAsUnsigned()
             for i in range(vec.GetNumChildren())]
    if PY3:
        return bytes(chars).decode(errors='replace')
    return "".join(chr(char) for char in chars)
def StdStringSummaryProvider(valobj, dict):
    # type: (SBValue, dict) -> str
    """Quoted summary for alloc::string::String (its Vec<u8> is child 0)."""
    # logger = Logger.Logger()
    # logger >> "[StdStringSummaryProvider] for " + str(valobj.GetName())
    inner_vec = valobj.GetChildAtIndex(0)
    return '"%s"' % vec_to_string(inner_vec)
def StdOsStringSummaryProvider(valobj, dict):
    # type: (SBValue, dict) -> str
    """Quoted summary for std::ffi::OsString."""
    # logger = Logger.Logger()
    # logger >> "[StdOsStringSummaryProvider] for " + str(valobj.GetName())
    buf = valobj.GetChildAtIndex(0).GetChildAtIndex(0)
    # On Windows the inner buffer is a Wtf8Buf wrapping a Vec<u8>; on other
    # platforms it is the Vec<u8> itself.
    is_windows = "Wtf8Buf" in buf.type.name
    vec = buf.GetChildAtIndex(0) if is_windows else buf
    return '"%s"' % vec_to_string(vec)
def StdStrSummaryProvider(valobj, dict):
    # type: (SBValue, dict) -> str
    """Quoted summary for &str: reads `length` bytes at `data_ptr` from the
    target process's memory."""
    # logger = Logger.Logger()
    # logger >> "[StdStrSummaryProvider] for " + str(valobj.GetName())

    length = valobj.GetChildMemberWithName("length").GetValueAsUnsigned()
    if length == 0:
        # Avoid a zero-length ReadMemory call.
        return '""'

    data_ptr = valobj.GetChildMemberWithName("data_ptr")

    start = data_ptr.GetValueAsUnsigned()
    error = SBError()
    process = data_ptr.GetProcess()
    # NOTE(review): `error` is never checked; a failed read surfaces as a
    # bogus summary rather than an exception.
    data = process.ReadMemory(start, length, error)
    data = data.decode(encoding='UTF-8') if PY3 else data
    return '"%s"' % data
class StructSyntheticProvider:
    """Pretty-printer for structs and struct enum variants"""

    def __init__(self, valobj, dict, is_variant=False):
        # type: (SBValue, dict, bool) -> StructSyntheticProvider
        # logger = Logger.Logger()
        self.valobj = valobj
        self.is_variant = is_variant
        self.type = valobj.GetType()
        self.fields = {}  # maps field name -> child index

        if is_variant:
            # Enum variants carry an extra leading field (field 0); skip it
            # so only the user-visible payload fields are exposed.
            self.fields_count = self.type.GetNumberOfFields() - 1
            real_fields = self.type.fields[1:]
        else:
            self.fields_count = self.type.GetNumberOfFields()
            real_fields = self.type.fields

        for number, field in enumerate(real_fields):
            self.fields[field.name] = number

    def num_children(self):
        # type: () -> int
        return self.fields_count

    def get_child_index(self, name):
        # type: (str) -> int
        return self.fields.get(name, -1)

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        # Shift past the hidden leading field for variants (see __init__).
        if self.is_variant:
            field = self.type.GetFieldAtIndex(index + 1)
        else:
            field = self.type.GetFieldAtIndex(index)
        return self.valobj.GetChildMemberWithName(field.name)

    def update(self):
        # type: () -> None
        pass

    def has_children(self):
        # type: () -> bool
        return True
class TupleSyntheticProvider:
    """Pretty-printer for tuples and tuple enum variants"""

    def __init__(self, valobj, dict, is_variant=False):
        # type: (SBValue, dict, bool) -> TupleSyntheticProvider
        # logger = Logger.Logger()
        self.valobj = valobj
        self.is_variant = is_variant
        self.type = valobj.GetType()

        if is_variant:
            # Enum variants carry an extra leading field (field 0); hide it.
            self.size = self.type.GetNumberOfFields() - 1
        else:
            self.size = self.type.GetNumberOfFields()

    def num_children(self):
        # type: () -> int
        return self.size

    def get_child_index(self, name):
        # type: (str) -> int
        # Tuple elements are addressed positionally: "0", "1", ...
        if name.isdigit():
            return int(name)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        if self.is_variant:
            field = self.type.GetFieldAtIndex(index + 1)
        else:
            field = self.type.GetFieldAtIndex(index)
        element = self.valobj.GetChildMemberWithName(field.name)
        # Re-wrap the element so it displays under its tuple index rather
        # than the internal field name.
        return self.valobj.CreateValueFromData(str(index), element.GetData(), element.GetType())

    def update(self):
        # type: () -> None
        pass

    def has_children(self):
        # type: () -> bool
        return True
class StdVecSyntheticProvider:
    """Pretty-printer for alloc::vec::Vec<T>

    struct Vec<T> { buf: RawVec<T>, len: usize }
    struct RawVec<T> { ptr: Unique<T>, cap: usize, ... }
    rust 1.31.1: struct Unique<T: ?Sized> { pointer: NonZero<*const T>, ... }
    rust 1.33.0: struct Unique<T: ?Sized> { pointer: *const T, ... }
    struct NonZero<T>(T)
    """

    def __init__(self, valobj, dict):
        # type: (SBValue, dict) -> StdVecSyntheticProvider
        # logger = Logger.Logger()
        # logger >> "[StdVecSyntheticProvider] for " + str(valobj.GetName())
        self.valobj = valobj
        self.update()

    def num_children(self):
        # type: () -> int
        return self.length

    def get_child_index(self, name):
        # type: (str) -> int
        # Children are named "[N]"; strip the brackets before parsing.
        index = name.lstrip('[').rstrip(']')
        if index.isdigit():
            return int(index)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        # Elements are contiguous from data_ptr; compute the address directly.
        start = self.data_ptr.GetValueAsUnsigned()
        address = start + index * self.element_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
        return element

    def update(self):
        # type: () -> None
        self.length = self.valobj.GetChildMemberWithName("len").GetValueAsUnsigned()
        self.buf = self.valobj.GetChildMemberWithName("buf")

        # See unwrap_unique_or_non_null for the Unique/NonZero handling.
        self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr"))

        self.element_type = self.data_ptr.GetType().GetPointeeType()
        self.element_type_size = self.element_type.GetByteSize()

    def has_children(self):
        # type: () -> bool
        return True
class StdSliceSyntheticProvider:
    """Synthetic provider for slices (&[T]): a (data_ptr, length) pair."""

    def __init__(self, valobj, dict):
        self.valobj = valobj
        self.update()

    def num_children(self):
        # type: () -> int
        return self.length

    def get_child_index(self, name):
        # type: (str) -> int
        # Children are named "[N]"; strip the brackets before parsing.
        index = name.lstrip('[').rstrip(']')
        if index.isdigit():
            return int(index)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        # Elements are contiguous from data_ptr.
        start = self.data_ptr.GetValueAsUnsigned()
        address = start + index * self.element_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
        return element

    def update(self):
        # type: () -> None
        self.length = self.valobj.GetChildMemberWithName("length").GetValueAsUnsigned()
        self.data_ptr = self.valobj.GetChildMemberWithName("data_ptr")

        self.element_type = self.data_ptr.GetType().GetPointeeType()
        self.element_type_size = self.element_type.GetByteSize()

    def has_children(self):
        # type: () -> bool
        return True
class StdVecDequeSyntheticProvider:
    """Pretty-printer for alloc::collections::vec_deque::VecDeque<T>

    struct VecDeque<T> { tail: usize, head: usize, buf: RawVec<T> }

    The buffer is a ring: logical element i lives at physical slot
    (tail + i) % cap.
    """

    def __init__(self, valobj, dict):
        # type: (SBValue, dict) -> StdVecDequeSyntheticProvider
        # logger = Logger.Logger()
        # logger >> "[StdVecDequeSyntheticProvider] for " + str(valobj.GetName())
        self.valobj = valobj
        self.update()

    def num_children(self):
        # type: () -> int
        return self.size

    def get_child_index(self, name):
        # type: (str) -> int
        # BUG FIX: the previous version compared the *string* index against
        # the integer tail/head fields (`self.tail <= index`), which raises
        # TypeError on Python 3 and ordered nonsensically on Python 2.
        # Accept "[N]" for any N within the deque's logical length.
        index = name.lstrip('[').rstrip(']')
        if index.isdigit() and int(index) < self.size:
            return int(index)
        return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        start = self.data_ptr.GetValueAsUnsigned()
        # Map the logical index onto its (possibly wrapped) physical slot.
        address = start + ((index + self.tail) % self.cap) * self.element_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.element_type)
        return element

    def update(self):
        # type: () -> None
        self.head = self.valobj.GetChildMemberWithName("head").GetValueAsUnsigned()
        self.tail = self.valobj.GetChildMemberWithName("tail").GetValueAsUnsigned()
        self.buf = self.valobj.GetChildMemberWithName("buf")
        self.cap = self.buf.GetChildMemberWithName("cap").GetValueAsUnsigned()
        # Logical length: contiguous when head >= tail, otherwise the
        # occupied region wraps around the end of the buffer.
        if self.head >= self.tail:
            self.size = self.head - self.tail
        else:
            self.size = self.cap + self.head - self.tail

        self.data_ptr = unwrap_unique_or_non_null(self.buf.GetChildMemberWithName("ptr"))

        self.element_type = self.data_ptr.GetType().GetPointeeType()
        self.element_type_size = self.element_type.GetByteSize()

    def has_children(self):
        # type: () -> bool
        return True
# BACKCOMPAT: rust 1.35
class StdOldHashMapSyntheticProvider:
    """Pretty-printer for std::collections::hash::map::HashMap<K, V, S>

    struct HashMap<K, V, S> {..., table: RawTable<K, V>, ... }
    struct RawTable<K, V> { capacity_mask: usize, size: usize, hashes: TaggedHashUintPtr, ... }
    """

    def __init__(self, valobj, dict, show_values=True):
        # type: (SBValue, dict, bool) -> StdOldHashMapSyntheticProvider
        self.valobj = valobj
        self.show_values = show_values  # False when backing a HashSet (keys only)
        self.update()

    def num_children(self):
        # type: () -> int
        return self.size

    def get_child_index(self, name):
        # type: (str) -> int
        # Children are named "[N]"; strip the brackets before parsing.
        index = name.lstrip('[').rstrip(']')
        if index.isdigit():
            return int(index)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        # logger = Logger.Logger()
        # The hashes pointer is tagged: mask off bit 0 to get the real address.
        start = self.data_ptr.GetValueAsUnsigned() & ~1

        # The (K, V) pairs array follows the hash array, aligned to the pair
        # type. See `libstd/collections/hash/table.rs:raw_bucket_at`.
        hashes = self.hash_uint_size * self.capacity
        align = self.pair_type_size
        # See `libcore/alloc.rs:padding_needed_for`; the repeated
        # `% self.modulo` emulates wrapping integer arithmetic in Python.
        len_rounded_up = (((((hashes + align) % self.modulo - 1) % self.modulo) & ~(
            (align - 1) % self.modulo)) % self.modulo - hashes) % self.modulo
        # len_rounded_up = ((hashes + align - 1) & ~(align - 1)) - hashes

        pairs_offset = hashes + len_rounded_up
        pairs_start = start + pairs_offset

        # Map the dense child index onto the sparse list of occupied buckets.
        table_index = self.valid_indices[index]
        idx = table_index & self.capacity_mask
        address = pairs_start + idx * self.pair_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.pair_type)
        if self.show_values:
            return element
        else:
            # HashSet mode: expose only the key half of the pair.
            key = element.GetChildAtIndex(0)
            return self.valobj.CreateValueFromData("[%s]" % index, key.GetData(), key.GetType())

    def update(self):
        # type: () -> None
        # logger = Logger.Logger()

        self.table = self.valobj.GetChildMemberWithName("table")  # type: SBValue
        self.size = self.table.GetChildMemberWithName("size").GetValueAsUnsigned()
        self.hashes = self.table.GetChildMemberWithName("hashes")
        self.hash_uint_type = self.hashes.GetType()
        self.hash_uint_size = self.hashes.GetType().GetByteSize()
        # NOTE(review): modulo is 2 ** byte_size (e.g. 256 for 8 bytes), not
        # 2 ** (8 * byte_size); looks suspicious for true usize wrapping —
        # confirm against upstream before changing.
        self.modulo = 2 ** self.hash_uint_size
        self.data_ptr = self.hashes.GetChildAtIndex(0).GetChildAtIndex(0)

        self.capacity_mask = self.table.GetChildMemberWithName("capacity_mask").GetValueAsUnsigned()
        self.capacity = (self.capacity_mask + 1) % self.modulo

        marker = self.table.GetChildMemberWithName("marker").GetType()  # type: SBType
        self.pair_type = marker.template_args[0]
        self.pair_type_size = self.pair_type.GetByteSize()

        # A bucket is occupied iff its hash entry is non-zero; precompute
        # the occupied indices so children can be addressed densely.
        self.valid_indices = []
        for idx in range(self.capacity):
            address = self.data_ptr.GetValueAsUnsigned() + idx * self.hash_uint_size
            hash_uint = self.data_ptr.CreateValueFromAddress("[%s]" % idx, address,
                                                             self.hash_uint_type)
            hash_ptr = hash_uint.GetChildAtIndex(0).GetChildAtIndex(0)
            if hash_ptr.GetValueAsUnsigned() != 0:
                self.valid_indices.append(idx)

        # logger >> "Valid indices: {}".format(str(self.valid_indices))

    def has_children(self):
        # type: () -> bool
        return True
class StdHashMapSyntheticProvider:
    """Pretty-printer for hashbrown's HashMap"""

    def __init__(self, valobj, dict, show_values=True):
        # type: (SBValue, dict, bool) -> StdHashMapSyntheticProvider
        self.valobj = valobj
        self.show_values = show_values  # False when backing a HashSet (keys only)
        self.update()

    def num_children(self):
        # type: () -> int
        return self.size

    def get_child_index(self, name):
        # type: (str) -> int
        # Children are named "[N]"; strip the brackets before parsing.
        index = name.lstrip('[').rstrip(']')
        if index.isdigit():
            return int(index)
        else:
            return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        pairs_start = self.data_ptr.GetValueAsUnsigned()
        # Map the dense child index onto the sparse occupied bucket list.
        idx = self.valid_indices[index]
        address = pairs_start + idx * self.pair_type_size
        element = self.data_ptr.CreateValueFromAddress("[%s]" % index, address, self.pair_type)
        if self.show_values:
            return element
        else:
            # HashSet mode: expose only the key half of the pair.
            key = element.GetChildAtIndex(0)
            return self.valobj.CreateValueFromData("[%s]" % index, key.GetData(), key.GetType())

    def update(self):
        # type: () -> None
        table = self.valobj.GetChildMemberWithName("base").GetChildMemberWithName("table")
        capacity = table.GetChildMemberWithName("bucket_mask").GetValueAsUnsigned() + 1
        ctrl = table.GetChildMemberWithName("ctrl").GetChildAtIndex(0)

        self.size = table.GetChildMemberWithName("items").GetValueAsUnsigned()
        self.data_ptr = table.GetChildMemberWithName("data").GetChildAtIndex(0)
        self.pair_type = self.data_ptr.Dereference().GetType()
        self.pair_type_size = self.pair_type.GetByteSize()

        u8_type = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar)
        u8_type_size = self.valobj.GetTarget().GetBasicType(eBasicTypeUnsignedChar).GetByteSize()

        # Scan the control-byte array: a bucket counts as present when the
        # top bit of its control byte is clear.
        self.valid_indices = []
        for idx in range(capacity):
            address = ctrl.GetValueAsUnsigned() + idx * u8_type_size
            value = ctrl.CreateValueFromAddress("ctrl[%s]" % idx, address,
                                                u8_type).GetValueAsUnsigned()
            is_present = value & 128 == 0
            if is_present:
                self.valid_indices.append(idx)

    def has_children(self):
        # type: () -> bool
        return True
def StdRcSummaryProvider(valobj, dict):
    # type: (SBValue, dict) -> str
    """Summary "strong=S, weak=W" showing Rc/Arc reference counts."""
    strong_count = valobj.GetChildMemberWithName("strong").GetValueAsUnsigned()
    weak_count = valobj.GetChildMemberWithName("weak").GetValueAsUnsigned()
    return "strong=%s, weak=%s" % (strong_count, weak_count)
class StdRcSyntheticProvider:
    """Pretty-printer for alloc::rc::Rc<T> and alloc::sync::Arc<T>

    struct Rc<T> { ptr: NonNull<RcBox<T>>, ... }
    rust 1.31.1: struct NonNull<T> { pointer: NonZero<*const T> }
    rust 1.33.0: struct NonNull<T> { pointer: *const T }
    struct NonZero<T>(T)
    struct RcBox<T> { strong: Cell<usize>, weak: Cell<usize>, value: T }
    struct Cell<T> { value: UnsafeCell<T> }
    struct UnsafeCell<T> { value: T }

    struct Arc<T> { ptr: NonNull<ArcInner<T>>, ... }
    struct ArcInner<T> { strong: atomic::AtomicUsize, weak: atomic::AtomicUsize, data: T }
    struct AtomicUsize { v: UnsafeCell<usize> }
    """

    def __init__(self, valobj, dict, is_atomic=False):
        # type: (SBValue, dict, bool) -> StdRcSyntheticProvider
        self.valobj = valobj

        self.ptr = unwrap_unique_or_non_null(self.valobj.GetChildMemberWithName("ptr"))

        # Arc stores the payload in `data`, Rc in `value` (see layouts above).
        self.value = self.ptr.GetChildMemberWithName("data" if is_atomic else "value")

        # Unwrap Cell/AtomicUsize (one intermediate child, then `value`) down
        # to the raw counter.
        self.strong = self.ptr.GetChildMemberWithName("strong").GetChildAtIndex(
            0).GetChildMemberWithName("value")
        self.weak = self.ptr.GetChildMemberWithName("weak").GetChildAtIndex(
            0).GetChildMemberWithName("value")

        self.value_builder = ValueBuilder(valobj)

        self.update()

    def num_children(self):
        # type: () -> int
        # Actually there are 3 children, but only the `value` should be shown as a child
        return 1

    def get_child_index(self, name):
        # type: (str) -> int
        if name == "value":
            return 0
        if name == "strong":
            return 1
        if name == "weak":
            return 2
        return -1

    def get_child_at_index(self, index):
        # type: (int) -> SBValue
        if index == 0:
            return self.value
        # strong/weak are materialized on demand as synthetic integers.
        if index == 1:
            return self.value_builder.from_uint("strong", self.strong_count)
        if index == 2:
            return self.value_builder.from_uint("weak", self.weak_count)

        return None

    def update(self):
        # type: () -> None
        self.strong_count = self.strong.GetValueAsUnsigned()
        # The -1 presumably discounts the implicit weak reference held on
        # behalf of all strong refs — confirm against alloc::rc docs.
        self.weak_count = self.weak.GetValueAsUnsigned() - 1

    def has_children(self):
        # type: () -> bool
        return True
class StdCellSyntheticProvider:
|
||||
"""Pretty-printer for std::cell::Cell"""
|
||||
|
||||
def __init__(self, valobj, dict):
|
||||
# type: (SBValue, dict) -> StdCellSyntheticProvider
|
||||
self.valobj = valobj
|
||||
self.value = valobj.GetChildMemberWithName("value").GetChildAtIndex(0)
|
||||
|
||||
def num_children(self):
|
||||
# type: () -> int
|
||||
return 1
|
||||
|
||||
def get_child_index(self, name):
|
||||
# type: (str) -> int
|
||||
if name == "value":
|
||||
return 0
|
||||
return -1
|
||||
|
||||
def get_child_at_index(self, index):
|
||||
# type: (int) -> SBValue
|
||||
if index == 0:
|
||||
return self.value
|
||||
return None
|
||||
|
||||
def update(self):
|
||||
# type: () -> None
|
||||
pass
|
||||
|
||||
def has_children(self):
|
||||
# type: () -> bool
|
||||
return True
|
||||
|
||||
|
||||
def StdRefSummaryProvider(valobj, dict):
|
||||
# type: (SBValue, dict) -> str
|
||||
borrow = valobj.GetChildMemberWithName("borrow").GetValueAsSigned()
|
||||
return "borrow={}".format(borrow) if borrow >= 0 else "borrow_mut={}".format(-borrow)
|
||||
|
||||
|
||||
class StdRefSyntheticProvider:
|
||||
"""Pretty-printer for std::cell::Ref, std::cell::RefMut, and std::cell::RefCell"""
|
||||
|
||||
def __init__(self, valobj, dict, is_cell=False):
|
||||
# type: (SBValue, dict, bool) -> StdRefSyntheticProvider
|
||||
self.valobj = valobj
|
||||
|
||||
borrow = valobj.GetChildMemberWithName("borrow")
|
||||
value = valobj.GetChildMemberWithName("value")
|
||||
if is_cell:
|
||||
self.borrow = borrow.GetChildMemberWithName("value").GetChildMemberWithName("value")
|
||||
self.value = value.GetChildMemberWithName("value")
|
||||
else:
|
||||
self.borrow = borrow.GetChildMemberWithName("borrow").GetChildMemberWithName(
|
||||
"value").GetChildMemberWithName("value")
|
||||
self.value = value.Dereference()
|
||||
|
||||
self.value_builder = ValueBuilder(valobj)
|
||||
|
||||
self.update()
|
||||
|
||||
def num_children(self):
|
||||
# type: () -> int
|
||||
# Actually there are 2 children, but only the `value` should be shown as a child
|
||||
return 1
|
||||
|
||||
def get_child_index(self, name):
|
||||
if name == "value":
|
||||
return 0
|
||||
if name == "borrow":
|
||||
return 1
|
||||
return -1
|
||||
|
||||
def get_child_at_index(self, index):
|
||||
# type: (int) -> SBValue
|
||||
if index == 0:
|
||||
return self.value
|
||||
if index == 1:
|
||||
return self.value_builder.from_int("borrow", self.borrow_count)
|
||||
return None
|
||||
|
||||
def update(self):
|
||||
# type: () -> None
|
||||
self.borrow_count = self.borrow.GetValueAsSigned()
|
||||
|
||||
def has_children(self):
|
||||
# type: () -> bool
|
||||
return True
|
@ -1,305 +0,0 @@
|
||||
import lldb
|
||||
import debugger_pretty_printers_common as rustpp
|
||||
|
||||
# ===============================================================================
|
||||
# LLDB Pretty Printing Module for Rust
|
||||
# ===============================================================================
|
||||
|
||||
|
||||
class LldbType(rustpp.Type):
|
||||
|
||||
def __init__(self, ty):
|
||||
super(LldbType, self).__init__()
|
||||
self.ty = ty
|
||||
self.fields = None
|
||||
|
||||
def get_unqualified_type_name(self):
|
||||
qualified_name = self.ty.GetName()
|
||||
|
||||
if qualified_name is None:
|
||||
return qualified_name
|
||||
|
||||
return rustpp.extract_type_name(qualified_name).replace("&'static ", "&")
|
||||
|
||||
def get_dwarf_type_kind(self):
|
||||
type_class = self.ty.GetTypeClass()
|
||||
|
||||
if type_class == lldb.eTypeClassStruct:
|
||||
return rustpp.DWARF_TYPE_CODE_STRUCT
|
||||
|
||||
if type_class == lldb.eTypeClassUnion:
|
||||
return rustpp.DWARF_TYPE_CODE_UNION
|
||||
|
||||
if type_class == lldb.eTypeClassPointer:
|
||||
return rustpp.DWARF_TYPE_CODE_PTR
|
||||
|
||||
if type_class == lldb.eTypeClassArray:
|
||||
return rustpp.DWARF_TYPE_CODE_ARRAY
|
||||
|
||||
if type_class == lldb.eTypeClassEnumeration:
|
||||
return rustpp.DWARF_TYPE_CODE_ENUM
|
||||
|
||||
return None
|
||||
|
||||
def get_fields(self):
|
||||
assert ((self.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_STRUCT) or
|
||||
(self.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_UNION))
|
||||
if self.fields is None:
|
||||
self.fields = list(self.ty.fields)
|
||||
return self.fields
|
||||
|
||||
def get_wrapped_value(self):
|
||||
return self.ty
|
||||
|
||||
|
||||
class LldbValue(rustpp.Value):
|
||||
def __init__(self, lldb_val):
|
||||
ty = lldb_val.type
|
||||
wty = LldbType(ty)
|
||||
super(LldbValue, self).__init__(wty)
|
||||
self.lldb_val = lldb_val
|
||||
self.children = {}
|
||||
|
||||
def get_child_at_index(self, index):
|
||||
child = self.children.get(index)
|
||||
if child is None:
|
||||
lldb_field = self.lldb_val.GetChildAtIndex(index)
|
||||
child = LldbValue(lldb_field)
|
||||
self.children[index] = child
|
||||
return child
|
||||
|
||||
def as_integer(self):
|
||||
return self.lldb_val.GetValueAsUnsigned()
|
||||
|
||||
def get_wrapped_value(self):
|
||||
return self.lldb_val
|
||||
|
||||
|
||||
def print_val(lldb_val, internal_dict):
|
||||
val = LldbValue(lldb_val)
|
||||
type_kind = val.type.get_type_kind()
|
||||
|
||||
if (type_kind == rustpp.TYPE_KIND_REGULAR_STRUCT or
|
||||
type_kind == rustpp.TYPE_KIND_REGULAR_UNION or
|
||||
type_kind == rustpp.TYPE_KIND_EMPTY):
|
||||
return print_struct_val(val,
|
||||
internal_dict,
|
||||
omit_first_field=False,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=False)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STRUCT_VARIANT:
|
||||
return print_struct_val(val,
|
||||
internal_dict,
|
||||
omit_first_field=True,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=False)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_SLICE:
|
||||
return print_vec_slice_val(val, internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STR_SLICE:
|
||||
return print_str_slice_val(val, internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STD_VEC:
|
||||
return print_std_vec_val(val, internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_STD_STRING:
|
||||
return print_std_string_val(val, internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_TUPLE:
|
||||
return print_struct_val(val,
|
||||
internal_dict,
|
||||
omit_first_field=False,
|
||||
omit_type_name=True,
|
||||
is_tuple_like=True)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_TUPLE_STRUCT:
|
||||
return print_struct_val(val,
|
||||
internal_dict,
|
||||
omit_first_field=False,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=True)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_CSTYLE_VARIANT:
|
||||
return val.type.get_unqualified_type_name()
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_TUPLE_VARIANT:
|
||||
return print_struct_val(val,
|
||||
internal_dict,
|
||||
omit_first_field=True,
|
||||
omit_type_name=False,
|
||||
is_tuple_like=True)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_SINGLETON_ENUM:
|
||||
return print_val(lldb_val.GetChildAtIndex(0), internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_PTR:
|
||||
return print_pointer_val(val, internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_FIXED_SIZE_VEC:
|
||||
return print_fixed_size_vec_val(val, internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_REGULAR_ENUM:
|
||||
# This is a regular enum, extract the discriminant
|
||||
discriminant_val = rustpp.get_discriminant_value_as_integer(val)
|
||||
return print_val(lldb_val.GetChildAtIndex(discriminant_val), internal_dict)
|
||||
|
||||
if type_kind == rustpp.TYPE_KIND_COMPRESSED_ENUM:
|
||||
encoded_enum_info = rustpp.EncodedEnumInfo(val)
|
||||
if encoded_enum_info.is_null_variant():
|
||||
return encoded_enum_info.get_null_variant_name()
|
||||
|
||||
non_null_val = encoded_enum_info.get_non_null_variant_val()
|
||||
return print_val(non_null_val.get_wrapped_value(), internal_dict)
|
||||
|
||||
# No pretty printer has been found
|
||||
return lldb_val.GetValue()
|
||||
|
||||
|
||||
# =---------------------------------------------------------------------------------------
|
||||
# Type-Specialized Printing Functions
|
||||
# =---------------------------------------------------------------------------------------
|
||||
|
||||
def print_struct_val(val, internal_dict, omit_first_field, omit_type_name, is_tuple_like):
|
||||
"""
|
||||
Prints a struct, tuple, or tuple struct value with Rust syntax.
|
||||
Ignores any fields before field_start_index.
|
||||
"""
|
||||
assert (val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_STRUCT or
|
||||
val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_UNION)
|
||||
|
||||
if omit_type_name:
|
||||
type_name = ""
|
||||
else:
|
||||
type_name = val.type.get_unqualified_type_name()
|
||||
|
||||
if is_tuple_like:
|
||||
template = "%(type_name)s(%(body)s)"
|
||||
separator = ", "
|
||||
else:
|
||||
template = "%(type_name)s {\n%(body)s\n}"
|
||||
separator = ", \n"
|
||||
|
||||
fields = val.type.get_fields()
|
||||
|
||||
def render_child(child_index):
|
||||
this = ""
|
||||
if not is_tuple_like:
|
||||
field_name = fields[child_index].name
|
||||
this += field_name + ": "
|
||||
|
||||
field_val = val.get_child_at_index(child_index)
|
||||
|
||||
if not field_val.get_wrapped_value().IsValid():
|
||||
field = fields[child_index]
|
||||
# LLDB is not good at handling zero-sized values, so we have to help
|
||||
# it a little
|
||||
if field.GetType().GetByteSize() == 0:
|
||||
return this + rustpp.extract_type_name(field.GetType().GetName())
|
||||
else:
|
||||
return this + "<invalid value>"
|
||||
|
||||
return this + print_val(field_val.get_wrapped_value(), internal_dict)
|
||||
|
||||
if omit_first_field:
|
||||
field_start_index = 1
|
||||
else:
|
||||
field_start_index = 0
|
||||
|
||||
body = separator.join([render_child(idx) for idx in range(field_start_index, len(fields))])
|
||||
|
||||
return template % {"type_name": type_name,
|
||||
"body": body}
|
||||
|
||||
|
||||
def print_pointer_val(val, internal_dict):
|
||||
"""Prints a pointer value with Rust syntax"""
|
||||
assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
|
||||
sigil = "&"
|
||||
type_name = val.type.get_unqualified_type_name()
|
||||
if type_name and type_name[0:1] in ["&", "*"]:
|
||||
sigil = type_name[0:1]
|
||||
|
||||
return sigil + hex(val.as_integer())
|
||||
|
||||
|
||||
def print_fixed_size_vec_val(val, internal_dict):
|
||||
assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_ARRAY
|
||||
lldb_val = val.get_wrapped_value()
|
||||
|
||||
output = "["
|
||||
|
||||
for i in range(lldb_val.num_children):
|
||||
output += print_val(lldb_val.GetChildAtIndex(i), internal_dict)
|
||||
if i != lldb_val.num_children - 1:
|
||||
output += ", "
|
||||
|
||||
output += "]"
|
||||
return output
|
||||
|
||||
|
||||
def print_vec_slice_val(val, internal_dict):
|
||||
(length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(val)
|
||||
return "&[%s]" % print_array_of_values(val.get_wrapped_value().GetName(),
|
||||
data_ptr,
|
||||
length,
|
||||
internal_dict)
|
||||
|
||||
|
||||
def print_std_vec_val(val, internal_dict):
|
||||
(length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(val)
|
||||
return "vec![%s]" % print_array_of_values(val.get_wrapped_value().GetName(),
|
||||
data_ptr,
|
||||
length,
|
||||
internal_dict)
|
||||
|
||||
|
||||
def print_str_slice_val(val, internal_dict):
|
||||
(length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(val)
|
||||
return read_utf8_string(data_ptr, length)
|
||||
|
||||
|
||||
def print_std_string_val(val, internal_dict):
|
||||
vec = val.get_child_at_index(0)
|
||||
(length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(vec)
|
||||
return read_utf8_string(data_ptr, length)
|
||||
|
||||
# =-----------------------------------------------------------------------
|
||||
# Helper Functions
|
||||
# =-----------------------------------------------------------------------
|
||||
|
||||
|
||||
def print_array_of_values(array_name, data_ptr_val, length, internal_dict):
|
||||
"""Prints a contiguous memory range, interpreting it as values of the
|
||||
pointee-type of data_ptr_val."""
|
||||
|
||||
data_ptr_type = data_ptr_val.type
|
||||
assert data_ptr_type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
|
||||
|
||||
element_type = data_ptr_type.get_wrapped_value().GetPointeeType()
|
||||
element_type_size = element_type.GetByteSize()
|
||||
|
||||
start_address = data_ptr_val.as_integer()
|
||||
raw_value = data_ptr_val.get_wrapped_value()
|
||||
|
||||
def render_element(i):
|
||||
address = start_address + i * element_type_size
|
||||
element_val = raw_value.CreateValueFromAddress(array_name + ("[%s]" % i),
|
||||
address,
|
||||
element_type)
|
||||
return print_val(element_val, internal_dict)
|
||||
|
||||
return ', '.join([render_element(i) for i in range(length)])
|
||||
|
||||
|
||||
def read_utf8_string(ptr_val, byte_count):
|
||||
if byte_count == 0:
|
||||
return '""'
|
||||
error = lldb.SBError()
|
||||
process = ptr_val.get_wrapped_value().GetProcess()
|
||||
data = process.ReadMemory(ptr_val.as_integer(), byte_count, error)
|
||||
if error.Success():
|
||||
return '"%s"' % data.decode(encoding='UTF-8')
|
||||
else:
|
||||
return '<error: %s>' % error.GetCString()
|
@ -30,13 +30,5 @@ EOF
|
||||
fi
|
||||
fi
|
||||
|
||||
# Prepare commands that will be loaded before any file on the command line has been loaded
|
||||
script_import="command script import \"$RUSTC_SYSROOT/lib/rustlib/etc/lldb_rust_formatters.py\""
|
||||
category_definition="type summary add --no-value --python-function lldb_rust_formatters.print_val -x \".*\" --category Rust"
|
||||
category_enable="type category enable Rust"
|
||||
|
||||
# Call LLDB with the commands added to the argument list
|
||||
exec "$lldb" --one-line-before-file "$script_import" \
|
||||
--one-line-before-file "$category_definition" \
|
||||
--one-line-before-file "$category_enable" \
|
||||
"$@"
|
||||
exec "$lldb" --source-before-file ./lldb_commands "$@"
|
||||
|
113
src/etc/rust_types.py
Normal file
113
src/etc/rust_types.py
Normal file
@ -0,0 +1,113 @@
|
||||
import re
|
||||
|
||||
|
||||
class RustType(object):
|
||||
OTHER = "Other"
|
||||
STRUCT = "Struct"
|
||||
TUPLE = "Tuple"
|
||||
CSTYLE_VARIANT = "CStyleVariant"
|
||||
TUPLE_VARIANT = "TupleVariant"
|
||||
STRUCT_VARIANT = "StructVariant"
|
||||
ENUM = "Enum"
|
||||
EMPTY = "Empty"
|
||||
SINGLETON_ENUM = "SingletonEnum"
|
||||
REGULAR_ENUM = "RegularEnum"
|
||||
COMPRESSED_ENUM = "CompressedEnum"
|
||||
REGULAR_UNION = "RegularUnion"
|
||||
|
||||
STD_STRING = "StdString"
|
||||
STD_OS_STRING = "StdOsString"
|
||||
STD_STR = "StdStr"
|
||||
STD_SLICE = "StdSlice"
|
||||
STD_VEC = "StdVec"
|
||||
STD_VEC_DEQUE = "StdVecDeque"
|
||||
STD_BTREE_SET = "StdBTreeSet"
|
||||
STD_BTREE_MAP = "StdBTreeMap"
|
||||
STD_HASH_MAP = "StdHashMap"
|
||||
STD_HASH_SET = "StdHashSet"
|
||||
STD_RC = "StdRc"
|
||||
STD_ARC = "StdArc"
|
||||
STD_CELL = "StdCell"
|
||||
STD_REF = "StdRef"
|
||||
STD_REF_MUT = "StdRefMut"
|
||||
STD_REF_CELL = "StdRefCell"
|
||||
|
||||
|
||||
STD_STRING_REGEX = re.compile(r"^(alloc::(\w+::)+)String$")
|
||||
STD_STR_REGEX = re.compile(r"^&str$")
|
||||
STD_SLICE_REGEX = re.compile(r"^&\[.+\]$")
|
||||
STD_OS_STRING_REGEX = re.compile(r"^(std::ffi::(\w+::)+)OsString$")
|
||||
STD_VEC_REGEX = re.compile(r"^(alloc::(\w+::)+)Vec<.+>$")
|
||||
STD_VEC_DEQUE_REGEX = re.compile(r"^(alloc::(\w+::)+)VecDeque<.+>$")
|
||||
STD_BTREE_SET_REGEX = re.compile(r"^(alloc::(\w+::)+)BTreeSet<.+>$")
|
||||
STD_BTREE_MAP_REGEX = re.compile(r"^(alloc::(\w+::)+)BTreeMap<.+>$")
|
||||
STD_HASH_MAP_REGEX = re.compile(r"^(std::collections::(\w+::)+)HashMap<.+>$")
|
||||
STD_HASH_SET_REGEX = re.compile(r"^(std::collections::(\w+::)+)HashSet<.+>$")
|
||||
STD_RC_REGEX = re.compile(r"^(alloc::(\w+::)+)Rc<.+>$")
|
||||
STD_ARC_REGEX = re.compile(r"^(alloc::(\w+::)+)Arc<.+>$")
|
||||
STD_CELL_REGEX = re.compile(r"^(core::(\w+::)+)Cell<.+>$")
|
||||
STD_REF_REGEX = re.compile(r"^(core::(\w+::)+)Ref<.+>$")
|
||||
STD_REF_MUT_REGEX = re.compile(r"^(core::(\w+::)+)RefMut<.+>$")
|
||||
STD_REF_CELL_REGEX = re.compile(r"^(core::(\w+::)+)RefCell<.+>$")
|
||||
|
||||
TUPLE_ITEM_REGEX = re.compile(r"__\d+$")
|
||||
|
||||
ENCODED_ENUM_PREFIX = "RUST$ENCODED$ENUM$"
|
||||
ENUM_DISR_FIELD_NAME = "<<variant>>"
|
||||
|
||||
STD_TYPE_TO_REGEX = {
|
||||
RustType.STD_STRING: STD_STRING_REGEX,
|
||||
RustType.STD_OS_STRING: STD_OS_STRING_REGEX,
|
||||
RustType.STD_STR: STD_STR_REGEX,
|
||||
RustType.STD_SLICE: STD_SLICE_REGEX,
|
||||
RustType.STD_VEC: STD_VEC_REGEX,
|
||||
RustType.STD_VEC_DEQUE: STD_VEC_DEQUE_REGEX,
|
||||
RustType.STD_HASH_MAP: STD_HASH_MAP_REGEX,
|
||||
RustType.STD_HASH_SET: STD_HASH_SET_REGEX,
|
||||
RustType.STD_BTREE_SET: STD_BTREE_SET_REGEX,
|
||||
RustType.STD_BTREE_MAP: STD_BTREE_MAP_REGEX,
|
||||
RustType.STD_RC: STD_RC_REGEX,
|
||||
RustType.STD_ARC: STD_ARC_REGEX,
|
||||
RustType.STD_REF: STD_REF_REGEX,
|
||||
RustType.STD_REF_MUT: STD_REF_MUT_REGEX,
|
||||
RustType.STD_REF_CELL: STD_REF_CELL_REGEX,
|
||||
RustType.STD_CELL: STD_CELL_REGEX,
|
||||
}
|
||||
|
||||
def is_tuple_fields(fields):
|
||||
# type: (list) -> bool
|
||||
return all(TUPLE_ITEM_REGEX.match(str(field.name)) for field in fields)
|
||||
|
||||
|
||||
def classify_struct(name, fields):
|
||||
if len(fields) == 0:
|
||||
return RustType.EMPTY
|
||||
|
||||
for ty, regex in STD_TYPE_TO_REGEX.items():
|
||||
if regex.match(name):
|
||||
return ty
|
||||
|
||||
if fields[0].name == ENUM_DISR_FIELD_NAME:
|
||||
return RustType.ENUM
|
||||
|
||||
if is_tuple_fields(fields):
|
||||
return RustType.TUPLE
|
||||
|
||||
return RustType.STRUCT
|
||||
|
||||
|
||||
def classify_union(fields):
|
||||
if len(fields) == 0:
|
||||
return RustType.EMPTY
|
||||
|
||||
first_variant_name = fields[0].name
|
||||
if first_variant_name is None:
|
||||
if len(fields) == 1:
|
||||
return RustType.SINGLETON_ENUM
|
||||
else:
|
||||
return RustType.REGULAR_ENUM
|
||||
elif first_variant_name.startswith(ENCODED_ENUM_PREFIX):
|
||||
assert len(fields) == 1
|
||||
return RustType.COMPRESSED_ENUM
|
||||
else:
|
||||
return RustType.REGULAR_UNION
|
@ -2034,7 +2034,7 @@ trait RcBoxPtr<T: ?Sized> {
|
||||
// The reference count will never be zero when this is called;
|
||||
// nevertheless, we insert an abort here to hint LLVM at
|
||||
// an otherwise missed optimization.
|
||||
if strong == 0 || strong == usize::max_value() {
|
||||
if strong == 0 || strong == usize::MAX {
|
||||
abort();
|
||||
}
|
||||
self.inner().strong.set(strong + 1);
|
||||
@ -2058,7 +2058,7 @@ trait RcBoxPtr<T: ?Sized> {
|
||||
// The reference count will never be zero when this is called;
|
||||
// nevertheless, we insert an abort here to hint LLVM at
|
||||
// an otherwise missed optimization.
|
||||
if weak == 0 || weak == usize::max_value() {
|
||||
if weak == 0 || weak == usize::MAX {
|
||||
abort();
|
||||
}
|
||||
self.inner().weak.set(weak + 1);
|
||||
|
@ -407,14 +407,14 @@ fn test_from_vec() {
|
||||
fn test_downcast() {
|
||||
use std::any::Any;
|
||||
|
||||
let r1: Rc<dyn Any> = Rc::new(i32::max_value());
|
||||
let r1: Rc<dyn Any> = Rc::new(i32::MAX);
|
||||
let r2: Rc<dyn Any> = Rc::new("abc");
|
||||
|
||||
assert!(r1.clone().downcast::<u32>().is_err());
|
||||
|
||||
let r1i32 = r1.downcast::<i32>();
|
||||
assert!(r1i32.is_ok());
|
||||
assert_eq!(r1i32.unwrap(), Rc::new(i32::max_value()));
|
||||
assert_eq!(r1i32.unwrap(), Rc::new(i32::MAX));
|
||||
|
||||
assert!(r2.clone().downcast::<i32>().is_err());
|
||||
|
||||
|
@ -465,14 +465,14 @@ fn test_from_vec() {
|
||||
fn test_downcast() {
|
||||
use std::any::Any;
|
||||
|
||||
let r1: Arc<dyn Any + Send + Sync> = Arc::new(i32::max_value());
|
||||
let r1: Arc<dyn Any + Send + Sync> = Arc::new(i32::MAX);
|
||||
let r2: Arc<dyn Any + Send + Sync> = Arc::new("abc");
|
||||
|
||||
assert!(r1.clone().downcast::<u32>().is_err());
|
||||
|
||||
let r1i32 = r1.downcast::<i32>();
|
||||
assert!(r1i32.is_ok());
|
||||
assert_eq!(r1i32.unwrap(), Arc::new(i32::max_value()));
|
||||
assert_eq!(r1i32.unwrap(), Arc::new(i32::MAX));
|
||||
|
||||
assert!(r2.clone().downcast::<i32>().is_err());
|
||||
|
||||
|
@ -566,13 +566,13 @@ mod slice_index {
|
||||
data: "hello";
|
||||
// note: using 0 specifically ensures that the result of overflowing is 0..0,
|
||||
// so that `get` doesn't simply return None for the wrong reason.
|
||||
bad: data[0..=usize::max_value()];
|
||||
bad: data[0..=usize::MAX];
|
||||
message: "maximum usize";
|
||||
}
|
||||
|
||||
in mod rangetoinclusive {
|
||||
data: "hello";
|
||||
bad: data[..=usize::max_value()];
|
||||
bad: data[..=usize::MAX];
|
||||
message: "maximum usize";
|
||||
}
|
||||
}
|
||||
|
@ -68,7 +68,7 @@ fn test_reserve() {
|
||||
|
||||
#[test]
|
||||
fn test_zst_capacity() {
|
||||
assert_eq!(Vec::<()>::new().capacity(), usize::max_value());
|
||||
assert_eq!(Vec::<()>::new().capacity(), usize::MAX);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -563,19 +563,19 @@ fn test_drain_inclusive_range() {
|
||||
|
||||
#[test]
|
||||
fn test_drain_max_vec_size() {
|
||||
let mut v = Vec::<()>::with_capacity(usize::max_value());
|
||||
let mut v = Vec::<()>::with_capacity(usize::MAX);
|
||||
unsafe {
|
||||
v.set_len(usize::max_value());
|
||||
v.set_len(usize::MAX);
|
||||
}
|
||||
for _ in v.drain(usize::max_value() - 1..) {}
|
||||
assert_eq!(v.len(), usize::max_value() - 1);
|
||||
for _ in v.drain(usize::MAX - 1..) {}
|
||||
assert_eq!(v.len(), usize::MAX - 1);
|
||||
|
||||
let mut v = Vec::<()>::with_capacity(usize::max_value());
|
||||
let mut v = Vec::<()>::with_capacity(usize::MAX);
|
||||
unsafe {
|
||||
v.set_len(usize::max_value());
|
||||
v.set_len(usize::MAX);
|
||||
}
|
||||
for _ in v.drain(usize::max_value() - 1..=usize::max_value() - 1) {}
|
||||
assert_eq!(v.len(), usize::max_value() - 1);
|
||||
for _ in v.drain(usize::MAX - 1..=usize::MAX - 1) {}
|
||||
assert_eq!(v.len(), usize::MAX - 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -2779,19 +2779,25 @@ impl<'a, T> Drain<'a, T> {
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// # #![feature(vec_drain_as_slice)]
|
||||
/// let mut vec = vec!['a', 'b', 'c'];
|
||||
/// let mut drain = vec.drain(..);
|
||||
/// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
|
||||
/// let _ = drain.next().unwrap();
|
||||
/// assert_eq!(drain.as_slice(), &['b', 'c']);
|
||||
/// ```
|
||||
#[unstable(feature = "vec_drain_as_slice", reason = "recently added", issue = "58957")]
|
||||
#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
|
||||
pub fn as_slice(&self) -> &[T] {
|
||||
self.iter.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
|
||||
impl<'a, T> AsRef<[T]> for Drain<'a, T> {
|
||||
fn as_ref(&self) -> &[T] {
|
||||
self.as_slice()
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "drain", since = "1.6.0")]
|
||||
unsafe impl<T: Sync> Sync for Drain<'_, T> {}
|
||||
#[stable(feature = "drain", since = "1.6.0")]
|
||||
|
@ -778,18 +778,13 @@ impl<T: ?Sized> RefCell<T> {
|
||||
///
|
||||
/// An example of panic:
|
||||
///
|
||||
/// ```
|
||||
/// ```should_panic
|
||||
/// use std::cell::RefCell;
|
||||
/// use std::thread;
|
||||
///
|
||||
/// let result = thread::spawn(move || {
|
||||
/// let c = RefCell::new(5);
|
||||
/// let m = c.borrow_mut();
|
||||
/// let c = RefCell::new(5);
|
||||
///
|
||||
/// let b = c.borrow(); // this causes a panic
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// let m = c.borrow_mut();
|
||||
/// let b = c.borrow(); // this causes a panic
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[inline]
|
||||
@ -858,18 +853,13 @@ impl<T: ?Sized> RefCell<T> {
|
||||
///
|
||||
/// An example of panic:
|
||||
///
|
||||
/// ```
|
||||
/// ```should_panic
|
||||
/// use std::cell::RefCell;
|
||||
/// use std::thread;
|
||||
///
|
||||
/// let result = thread::spawn(move || {
|
||||
/// let c = RefCell::new(5);
|
||||
/// let m = c.borrow();
|
||||
/// let c = RefCell::new(5);
|
||||
/// let m = c.borrow();
|
||||
///
|
||||
/// let b = c.borrow_mut(); // this causes a panic
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// let b = c.borrow_mut(); // this causes a panic
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[inline]
|
||||
@ -1163,8 +1153,8 @@ impl<'b> BorrowRef<'b> {
|
||||
// Incrementing borrow can result in a non-reading value (<= 0) in these cases:
|
||||
// 1. It was < 0, i.e. there are writing borrows, so we can't allow a read borrow
|
||||
// due to Rust's reference aliasing rules
|
||||
// 2. It was isize::max_value() (the max amount of reading borrows) and it overflowed
|
||||
// into isize::min_value() (the max amount of writing borrows) so we can't allow
|
||||
// 2. It was isize::MAX (the max amount of reading borrows) and it overflowed
|
||||
// into isize::MIN (the max amount of writing borrows) so we can't allow
|
||||
// an additional read borrow because isize can't represent so many read borrows
|
||||
// (this can only happen if you mem::forget more than a small constant amount of
|
||||
// `Ref`s, which is not good practice)
|
||||
@ -1172,7 +1162,7 @@ impl<'b> BorrowRef<'b> {
|
||||
} else {
|
||||
// Incrementing borrow can result in a reading value (> 0) in these cases:
|
||||
// 1. It was = 0, i.e. it wasn't borrowed, and we are taking the first read borrow
|
||||
// 2. It was > 0 and < isize::max_value(), i.e. there were read borrows, and isize
|
||||
// 2. It was > 0 and < isize::MAX, i.e. there were read borrows, and isize
|
||||
// is large enough to represent having one more read borrow
|
||||
borrow.set(b);
|
||||
Some(BorrowRef { borrow })
|
||||
@ -1198,7 +1188,7 @@ impl Clone for BorrowRef<'_> {
|
||||
debug_assert!(is_reading(borrow));
|
||||
// Prevent the borrow counter from overflowing into
|
||||
// a writing borrow.
|
||||
assert!(borrow != isize::max_value());
|
||||
assert!(borrow != isize::MAX);
|
||||
self.borrow.set(borrow + 1);
|
||||
BorrowRef { borrow: self.borrow }
|
||||
}
|
||||
@ -1489,7 +1479,7 @@ impl<'b> BorrowRefMut<'b> {
|
||||
let borrow = self.borrow.get();
|
||||
debug_assert!(is_writing(borrow));
|
||||
// Prevent the borrow counter from underflowing.
|
||||
assert!(borrow != isize::min_value());
|
||||
assert!(borrow != isize::MIN);
|
||||
self.borrow.set(borrow - 1);
|
||||
BorrowRefMut { borrow: self.borrow }
|
||||
}
|
||||
|
@ -278,16 +278,11 @@ impl fmt::Display for CharTryFromError {
|
||||
///
|
||||
/// Passing a large radix, causing a panic:
|
||||
///
|
||||
/// ```
|
||||
/// use std::thread;
|
||||
/// ```should_panic
|
||||
/// use std::char;
|
||||
///
|
||||
/// let result = thread::spawn(|| {
|
||||
/// // this panics
|
||||
/// let c = char::from_digit(1, 37);
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// // this panics
|
||||
/// let c = char::from_digit(1, 37);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
|
@ -229,16 +229,11 @@ impl char {
|
||||
///
|
||||
/// Passing a large radix, causing a panic:
|
||||
///
|
||||
/// ```
|
||||
/// use std::thread;
|
||||
/// ```should_panic
|
||||
/// use std::char;
|
||||
///
|
||||
/// let result = thread::spawn(|| {
|
||||
/// // this panics
|
||||
/// let c = char::from_digit(1, 37);
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// // this panics
|
||||
/// char::from_digit(1, 37);
|
||||
/// ```
|
||||
#[unstable(feature = "assoc_char_funcs", reason = "recently added", issue = "71763")]
|
||||
#[inline]
|
||||
@ -282,15 +277,9 @@ impl char {
|
||||
///
|
||||
/// Passing a large radix, causing a panic:
|
||||
///
|
||||
/// ```
|
||||
/// use std::thread;
|
||||
///
|
||||
/// let result = thread::spawn(|| {
|
||||
/// // this panics
|
||||
/// '1'.is_digit(37);
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// ```should_panic
|
||||
/// // this panics
|
||||
/// '1'.is_digit(37);
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[inline]
|
||||
@ -337,14 +326,9 @@ impl char {
|
||||
///
|
||||
/// Passing a large radix, causing a panic:
|
||||
///
|
||||
/// ```
|
||||
/// use std::thread;
|
||||
///
|
||||
/// let result = thread::spawn(|| {
|
||||
/// '1'.to_digit(37);
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// ```should_panic
|
||||
/// // this panics
|
||||
/// '1'.to_digit(37);
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[inline]
|
||||
@ -646,17 +630,11 @@ impl char {
|
||||
///
|
||||
/// A buffer that's too small:
|
||||
///
|
||||
/// ```
|
||||
/// use std::thread;
|
||||
/// ```should_panic
|
||||
/// let mut b = [0; 1];
|
||||
///
|
||||
/// let result = thread::spawn(|| {
|
||||
/// let mut b = [0; 1];
|
||||
///
|
||||
/// // this panics
|
||||
/// 'ß'.encode_utf8(&mut b);
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// // this panics
|
||||
/// 'ß'.encode_utf8(&mut b);
|
||||
/// ```
|
||||
#[stable(feature = "unicode_encode_char", since = "1.15.0")]
|
||||
#[inline]
|
||||
@ -687,17 +665,11 @@ impl char {
|
||||
///
|
||||
/// A buffer that's too small:
|
||||
///
|
||||
/// ```
|
||||
/// use std::thread;
|
||||
/// ```should_panic
|
||||
/// let mut b = [0; 1];
|
||||
///
|
||||
/// let result = thread::spawn(|| {
|
||||
/// let mut b = [0; 1];
|
||||
///
|
||||
/// // this panics
|
||||
/// '𝕊'.encode_utf16(&mut b);
|
||||
/// }).join();
|
||||
///
|
||||
/// assert!(result.is_err());
|
||||
/// // this panics
|
||||
/// '𝕊'.encode_utf16(&mut b);
|
||||
/// ```
|
||||
#[stable(feature = "unicode_encode_char", since = "1.15.0")]
|
||||
#[inline]
|
||||
|
@ -217,7 +217,7 @@ macro_rules! try_from_upper_bounded {
|
||||
/// is outside of the range of the target type.
|
||||
#[inline]
|
||||
fn try_from(u: $source) -> Result<Self, Self::Error> {
|
||||
if u > (Self::max_value() as $source) {
|
||||
if u > (Self::MAX as $source) {
|
||||
Err(TryFromIntError(()))
|
||||
} else {
|
||||
Ok(u as Self)
|
||||
@ -239,8 +239,8 @@ macro_rules! try_from_both_bounded {
|
||||
/// is outside of the range of the target type.
|
||||
#[inline]
|
||||
fn try_from(u: $source) -> Result<Self, Self::Error> {
|
||||
let min = Self::min_value() as $source;
|
||||
let max = Self::max_value() as $source;
|
||||
let min = Self::MIN as $source;
|
||||
let max = Self::MAX as $source;
|
||||
if u < min || u > max {
|
||||
Err(TryFromIntError(()))
|
||||
} else {
|
||||
|
@ -56,6 +56,7 @@ pub const fn from_generator<T>(gen: T) -> impl Future<Output = T::Return>
|
||||
where
|
||||
T: Generator<ResumeTy, Yield = ()>,
|
||||
{
|
||||
#[rustc_diagnostic_item = "gen_future"]
|
||||
struct GenFuture<T: Generator<ResumeTy, Yield = ()>>(T);
|
||||
|
||||
// We rely on the fact that async/await futures are immovable in order to create
|
||||
|
@ -2717,12 +2717,12 @@ pub trait Iterator {
|
||||
/// ```
|
||||
/// let a = [1, 2, 3];
|
||||
///
|
||||
/// let v_cloned: Vec<_> = a.iter().copied().collect();
|
||||
/// let v_copied: Vec<_> = a.iter().copied().collect();
|
||||
///
|
||||
/// // copied is the same as .map(|&x| x)
|
||||
/// let v_map: Vec<_> = a.iter().map(|&x| x).collect();
|
||||
///
|
||||
/// assert_eq!(v_cloned, vec![1, 2, 3]);
|
||||
/// assert_eq!(v_copied, vec![1, 2, 3]);
|
||||
/// assert_eq!(v_map, vec![1, 2, 3]);
|
||||
/// ```
|
||||
#[stable(feature = "iter_copied", since = "1.36.0")]
|
||||
|
@ -145,7 +145,6 @@
|
||||
#![feature(associated_type_bounds)]
|
||||
#![feature(const_type_id)]
|
||||
#![feature(const_caller_location)]
|
||||
#![feature(option_zip)]
|
||||
#![feature(no_niche)] // rust-lang/rust#68303
|
||||
|
||||
#[prelude_import]
|
||||
|
@ -750,9 +750,9 @@ $EndFeature, "
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
concat!("Unchecked integer addition. Computes `self + rhs, assuming overflow
|
||||
concat!("Unchecked integer addition. Computes `self + rhs`, assuming overflow
|
||||
cannot occur. This results in undefined behavior when `self + rhs > ", stringify!($SelfT),
|
||||
"::max_value()` or `self + rhs < ", stringify!($SelfT), "::min_value()`."),
|
||||
"::MAX` or `self + rhs < ", stringify!($SelfT), "::MIN`."),
|
||||
#[unstable(
|
||||
feature = "unchecked_math",
|
||||
reason = "niche optimization path",
|
||||
@ -792,9 +792,9 @@ $EndFeature, "
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
concat!("Unchecked integer subtraction. Computes `self - rhs, assuming overflow
|
||||
concat!("Unchecked integer subtraction. Computes `self - rhs`, assuming overflow
|
||||
cannot occur. This results in undefined behavior when `self - rhs > ", stringify!($SelfT),
|
||||
"::max_value()` or `self - rhs < ", stringify!($SelfT), "::min_value()`."),
|
||||
"::MAX` or `self - rhs < ", stringify!($SelfT), "::MIN`."),
|
||||
#[unstable(
|
||||
feature = "unchecked_math",
|
||||
reason = "niche optimization path",
|
||||
@ -834,9 +834,9 @@ $EndFeature, "
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
concat!("Unchecked integer multiplication. Computes `self * rhs, assuming overflow
|
||||
concat!("Unchecked integer multiplication. Computes `self * rhs`, assuming overflow
|
||||
cannot occur. This results in undefined behavior when `self * rhs > ", stringify!($SelfT),
|
||||
"::max_value()` or `self * rhs < ", stringify!($SelfT), "::min_value()`."),
|
||||
"::MAX` or `self * rhs < ", stringify!($SelfT), "::MIN`."),
|
||||
#[unstable(
|
||||
feature = "unchecked_math",
|
||||
reason = "niche optimization path",
|
||||
@ -871,7 +871,7 @@ $EndFeature, "
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn checked_div(self, rhs: Self) -> Option<Self> {
|
||||
if rhs == 0 || (self == Self::min_value() && rhs == -1) {
|
||||
if rhs == 0 || (self == Self::MIN && rhs == -1) {
|
||||
None
|
||||
} else {
|
||||
// SAFETY: div by zero and by INT_MIN have been checked above
|
||||
@ -900,7 +900,7 @@ assert_eq!((1", stringify!($SelfT), ").checked_div_euclid(0), None);
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn checked_div_euclid(self, rhs: Self) -> Option<Self> {
|
||||
if rhs == 0 || (self == Self::min_value() && rhs == -1) {
|
||||
if rhs == 0 || (self == Self::MIN && rhs == -1) {
|
||||
None
|
||||
} else {
|
||||
Some(self.div_euclid(rhs))
|
||||
@ -929,7 +929,7 @@ $EndFeature, "
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn checked_rem(self, rhs: Self) -> Option<Self> {
|
||||
if rhs == 0 || (self == Self::min_value() && rhs == -1) {
|
||||
if rhs == 0 || (self == Self::MIN && rhs == -1) {
|
||||
None
|
||||
} else {
|
||||
// SAFETY: div by zero and by INT_MIN have been checked above
|
||||
@ -957,7 +957,7 @@ assert_eq!(", stringify!($SelfT), "::MIN.checked_rem_euclid(-1), None);
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn checked_rem_euclid(self, rhs: Self) -> Option<Self> {
|
||||
if rhs == 0 || (self == Self::min_value() && rhs == -1) {
|
||||
if rhs == 0 || (self == Self::MIN && rhs == -1) {
|
||||
None
|
||||
} else {
|
||||
Some(self.rem_euclid(rhs))
|
||||
@ -1236,9 +1236,9 @@ $EndFeature, "
|
||||
match self.checked_mul(rhs) {
|
||||
Some(x) => x,
|
||||
None => if (self < 0) == (rhs < 0) {
|
||||
Self::max_value()
|
||||
Self::MAX
|
||||
} else {
|
||||
Self::min_value()
|
||||
Self::MIN
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1267,8 +1267,8 @@ $EndFeature, "
|
||||
pub const fn saturating_pow(self, exp: u32) -> Self {
|
||||
match self.checked_pow(exp) {
|
||||
Some(x) => x,
|
||||
None if self < 0 && exp % 2 == 1 => Self::min_value(),
|
||||
None => Self::max_value(),
|
||||
None if self < 0 && exp % 2 == 1 => Self::MIN,
|
||||
None => Self::MAX,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1738,7 +1738,7 @@ $EndFeature, "
|
||||
#[must_use = "this returns the result of the operation, \
|
||||
without modifying the original"]
|
||||
pub const fn overflowing_div(self, rhs: Self) -> (Self, bool) {
|
||||
if self == Self::min_value() && rhs == -1 {
|
||||
if self == Self::MIN && rhs == -1 {
|
||||
(self, true)
|
||||
} else {
|
||||
(self / rhs, false)
|
||||
@ -1771,7 +1771,7 @@ assert_eq!(", stringify!($SelfT), "::MIN.overflowing_div_euclid(-1), (", stringi
|
||||
#[must_use = "this returns the result of the operation, \
|
||||
without modifying the original"]
|
||||
pub const fn overflowing_div_euclid(self, rhs: Self) -> (Self, bool) {
|
||||
if self == Self::min_value() && rhs == -1 {
|
||||
if self == Self::MIN && rhs == -1 {
|
||||
(self, true)
|
||||
} else {
|
||||
(self.div_euclid(rhs), false)
|
||||
@ -1805,7 +1805,7 @@ $EndFeature, "
|
||||
#[must_use = "this returns the result of the operation, \
|
||||
without modifying the original"]
|
||||
pub const fn overflowing_rem(self, rhs: Self) -> (Self, bool) {
|
||||
if self == Self::min_value() && rhs == -1 {
|
||||
if self == Self::MIN && rhs == -1 {
|
||||
(0, true)
|
||||
} else {
|
||||
(self % rhs, false)
|
||||
@ -1838,7 +1838,7 @@ assert_eq!(", stringify!($SelfT), "::MIN.overflowing_rem_euclid(-1), (0, true));
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn overflowing_rem_euclid(self, rhs: Self) -> (Self, bool) {
|
||||
if self == Self::min_value() && rhs == -1 {
|
||||
if self == Self::MIN && rhs == -1 {
|
||||
(0, true)
|
||||
} else {
|
||||
(self.rem_euclid(rhs), false)
|
||||
@ -1869,8 +1869,8 @@ assert_eq!(", stringify!($SelfT), "::MIN.overflowing_neg(), (", stringify!($Self
|
||||
#[allow(unused_attributes)]
|
||||
#[allow_internal_unstable(const_if_match)]
|
||||
pub const fn overflowing_neg(self) -> (Self, bool) {
|
||||
if self == Self::min_value() {
|
||||
(Self::min_value(), true)
|
||||
if self == Self::MIN {
|
||||
(Self::MIN, true)
|
||||
} else {
|
||||
(-self, false)
|
||||
}
|
||||
@ -1952,7 +1952,7 @@ $EndFeature, "
|
||||
#[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")]
|
||||
#[inline]
|
||||
pub const fn overflowing_abs(self) -> (Self, bool) {
|
||||
(self.wrapping_abs(), self == Self::min_value())
|
||||
(self.wrapping_abs(), self == Self::MIN)
|
||||
}
|
||||
}
|
||||
|
||||
@ -2986,9 +2986,9 @@ assert_eq!((", stringify!($SelfT), "::MAX - 2).checked_add(3), None);", $EndFeat
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
concat!("Unchecked integer addition. Computes `self + rhs, assuming overflow
|
||||
concat!("Unchecked integer addition. Computes `self + rhs`, assuming overflow
|
||||
cannot occur. This results in undefined behavior when `self + rhs > ", stringify!($SelfT),
|
||||
"::max_value()` or `self + rhs < ", stringify!($SelfT), "::min_value()`."),
|
||||
"::MAX` or `self + rhs < ", stringify!($SelfT), "::MIN`."),
|
||||
#[unstable(
|
||||
feature = "unchecked_math",
|
||||
reason = "niche optimization path",
|
||||
@ -3026,9 +3026,9 @@ assert_eq!(0", stringify!($SelfT), ".checked_sub(1), None);", $EndFeature, "
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
concat!("Unchecked integer subtraction. Computes `self - rhs, assuming overflow
|
||||
concat!("Unchecked integer subtraction. Computes `self - rhs`, assuming overflow
|
||||
cannot occur. This results in undefined behavior when `self - rhs > ", stringify!($SelfT),
|
||||
"::max_value()` or `self - rhs < ", stringify!($SelfT), "::min_value()`."),
|
||||
"::MAX` or `self - rhs < ", stringify!($SelfT), "::MIN`."),
|
||||
#[unstable(
|
||||
feature = "unchecked_math",
|
||||
reason = "niche optimization path",
|
||||
@ -3066,9 +3066,9 @@ assert_eq!(", stringify!($SelfT), "::MAX.checked_mul(2), None);", $EndFeature, "
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
concat!("Unchecked integer multiplication. Computes `self * rhs, assuming overflow
|
||||
concat!("Unchecked integer multiplication. Computes `self * rhs`, assuming overflow
|
||||
cannot occur. This results in undefined behavior when `self * rhs > ", stringify!($SelfT),
|
||||
"::max_value()` or `self * rhs < ", stringify!($SelfT), "::min_value()`."),
|
||||
"::MAX` or `self * rhs < ", stringify!($SelfT), "::MIN`."),
|
||||
#[unstable(
|
||||
feature = "unchecked_math",
|
||||
reason = "niche optimization path",
|
||||
@ -3309,7 +3309,8 @@ Basic usage:
|
||||
|
||||
```
|
||||
", $Feature, "assert_eq!(100", stringify!($SelfT), ".saturating_add(1), 101);
|
||||
assert_eq!(200u8.saturating_add(127), 255);", $EndFeature, "
|
||||
assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);",
|
||||
$EndFeature, "
|
||||
```"),
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -3366,7 +3367,7 @@ assert_eq!((", stringify!($SelfT), "::MAX).saturating_mul(10), ", stringify!($Se
|
||||
pub const fn saturating_mul(self, rhs: Self) -> Self {
|
||||
match self.checked_mul(rhs) {
|
||||
Some(x) => x,
|
||||
None => Self::max_value(),
|
||||
None => Self::MAX,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -3393,7 +3394,7 @@ $EndFeature, "
|
||||
pub const fn saturating_pow(self, exp: u32) -> Self {
|
||||
match self.checked_pow(exp) {
|
||||
Some(x) => x,
|
||||
None => Self::max_value(),
|
||||
None => Self::MAX,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -4080,7 +4081,7 @@ Basic usage:
|
||||
}
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
doc_comment! {
|
||||
concat!("Performs Euclidean division.
|
||||
|
||||
Since, for the positive integers, all common
|
||||
@ -4178,7 +4179,7 @@ assert!(!10", stringify!($SelfT), ".is_power_of_two());", $EndFeature, "
|
||||
// (such as intel pre-haswell) have more efficient ctlz
|
||||
// intrinsics when the argument is non-zero.
|
||||
let z = unsafe { intrinsics::ctlz_nonzero(p) };
|
||||
<$SelfT>::max_value() >> z
|
||||
<$SelfT>::MAX >> z
|
||||
}
|
||||
|
||||
doc_comment! {
|
||||
@ -5160,9 +5161,9 @@ trait FromStrRadixHelper: PartialOrd + Copy {
|
||||
macro_rules! doit {
|
||||
($($t:ty)*) => ($(impl FromStrRadixHelper for $t {
|
||||
#[inline]
|
||||
fn min_value() -> Self { Self::min_value() }
|
||||
fn min_value() -> Self { Self::MIN }
|
||||
#[inline]
|
||||
fn max_value() -> Self { Self::max_value() }
|
||||
fn max_value() -> Self { Self::MAX }
|
||||
#[inline]
|
||||
fn from_u32(u: u32) -> Self { u as Self }
|
||||
#[inline]
|
||||
|
@ -694,7 +694,7 @@ Basic usage:
|
||||
#![feature(wrapping_int_impl)]
|
||||
use std::num::Wrapping;
|
||||
|
||||
let n = Wrapping(", stringify!($t), "::max_value()) >> 2;
|
||||
let n = Wrapping(", stringify!($t), "::MAX) >> 2;
|
||||
|
||||
assert_eq!(n.leading_zeros(), 3);
|
||||
```"),
|
||||
@ -723,8 +723,7 @@ use std::num::Wrapping;
|
||||
|
||||
assert_eq!(Wrapping(100", stringify!($t), ").abs(), Wrapping(100));
|
||||
assert_eq!(Wrapping(-100", stringify!($t), ").abs(), Wrapping(100));
|
||||
assert_eq!(Wrapping(", stringify!($t), "::min_value()).abs(), Wrapping(", stringify!($t),
|
||||
"::min_value()));
|
||||
assert_eq!(Wrapping(", stringify!($t), "::MIN).abs(), Wrapping(", stringify!($t), "::MIN));
|
||||
assert_eq!(Wrapping(-128i8).abs().0 as u8, 128u8);
|
||||
```"),
|
||||
#[inline]
|
||||
@ -823,7 +822,7 @@ Basic usage:
|
||||
#![feature(wrapping_int_impl)]
|
||||
use std::num::Wrapping;
|
||||
|
||||
let n = Wrapping(", stringify!($t), "::max_value()) >> 2;
|
||||
let n = Wrapping(", stringify!($t), "::MAX) >> 2;
|
||||
|
||||
assert_eq!(n.leading_zeros(), 2);
|
||||
```"),
|
||||
|
@ -926,7 +926,6 @@ impl<T> Option<T> {
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(option_zip)]
|
||||
/// let x = Some(1);
|
||||
/// let y = Some("hi");
|
||||
/// let z = None::<u8>;
|
||||
@ -934,9 +933,12 @@ impl<T> Option<T> {
|
||||
/// assert_eq!(x.zip(y), Some((1, "hi")));
|
||||
/// assert_eq!(x.zip(z), None);
|
||||
/// ```
|
||||
#[unstable(feature = "option_zip", issue = "70086")]
|
||||
#[stable(feature = "option_zip_option", since = "1.46.0")]
|
||||
pub fn zip<U>(self, other: Option<U>) -> Option<(T, U)> {
|
||||
self.zip_with(other, |a, b| (a, b))
|
||||
match (self, other) {
|
||||
(Some(a), Some(b)) => Some((a, b)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Zips `self` and another `Option` with function `f`.
|
||||
|
@ -291,7 +291,7 @@ impl<T: ?Sized> *const T {
|
||||
T: Sized,
|
||||
{
|
||||
let pointee_size = mem::size_of::<T>();
|
||||
assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize);
|
||||
assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
|
||||
intrinsics::ptr_offset_from(self, origin)
|
||||
}
|
||||
|
||||
@ -336,7 +336,7 @@ impl<T: ?Sized> *const T {
|
||||
T: Sized,
|
||||
{
|
||||
let pointee_size = mem::size_of::<T>();
|
||||
assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize);
|
||||
assert!(0 < pointee_size && pointee_size <= isize::MAX as usize);
|
||||
|
||||
let d = isize::wrapping_sub(self as _, origin as _);
|
||||
d.wrapping_div(pointee_size as _)
|
||||
|
@ -1128,7 +1128,7 @@ pub(crate) unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usize {
|
||||
//
|
||||
// Note, that we use wrapping operations here intentionally – the original formula
|
||||
// uses e.g., subtraction `mod n`. It is entirely fine to do them `mod
|
||||
// usize::max_value()` instead, because we take the result `mod n` at the end
|
||||
// usize::MAX` instead, because we take the result `mod n` at the end
|
||||
// anyway.
|
||||
inverse = inverse.wrapping_mul(2usize.wrapping_sub(x.wrapping_mul(inverse)));
|
||||
if going_mod >= m {
|
||||
@ -1193,7 +1193,7 @@ pub(crate) unsafe fn align_offset<T: Sized>(p: *const T, a: usize) -> usize {
|
||||
}
|
||||
|
||||
// Cannot be aligned at all.
|
||||
usize::max_value()
|
||||
usize::MAX
|
||||
}
|
||||
|
||||
/// Compares raw pointers for equality.
|
||||
@ -1345,14 +1345,24 @@ macro_rules! fnptr_impls_safety_abi {
|
||||
#[stable(feature = "fnptr_impls", since = "1.4.0")]
|
||||
impl<Ret, $($Arg),*> fmt::Pointer for $FnTy {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Pointer::fmt(&(*self as *const ()), f)
|
||||
// HACK: The intermediate cast as usize is required for AVR
|
||||
// so that the address space of the source function pointer
|
||||
// is preserved in the final function pointer.
|
||||
//
|
||||
// https://github.com/avr-rust/rust/issues/143
|
||||
fmt::Pointer::fmt(&(*self as usize as *const ()), f)
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "fnptr_impls", since = "1.4.0")]
|
||||
impl<Ret, $($Arg),*> fmt::Debug for $FnTy {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt::Pointer::fmt(&(*self as *const ()), f)
|
||||
// HACK: The intermediate cast as usize is required for AVR
|
||||
// so that the address space of the source function pointer
|
||||
// is preserved in the final function pointer.
|
||||
//
|
||||
// https://github.com/avr-rust/rust/issues/143
|
||||
fmt::Pointer::fmt(&(*self as usize as *const ()), f)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -3043,16 +3043,12 @@ impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
|
||||
|
||||
#[inline]
|
||||
fn get(self, slice: &[T]) -> Option<&[T]> {
|
||||
if *self.end() == usize::max_value() {
|
||||
None
|
||||
} else {
|
||||
(*self.start()..self.end() + 1).get(slice)
|
||||
}
|
||||
if *self.end() == usize::MAX { None } else { (*self.start()..self.end() + 1).get(slice) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn get_mut(self, slice: &mut [T]) -> Option<&mut [T]> {
|
||||
if *self.end() == usize::max_value() {
|
||||
if *self.end() == usize::MAX {
|
||||
None
|
||||
} else {
|
||||
(*self.start()..self.end() + 1).get_mut(slice)
|
||||
@ -3071,7 +3067,7 @@ impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
|
||||
|
||||
#[inline]
|
||||
fn index(self, slice: &[T]) -> &[T] {
|
||||
if *self.end() == usize::max_value() {
|
||||
if *self.end() == usize::MAX {
|
||||
slice_index_overflow_fail();
|
||||
}
|
||||
(*self.start()..self.end() + 1).index(slice)
|
||||
@ -3079,7 +3075,7 @@ impl<T> SliceIndex<[T]> for ops::RangeInclusive<usize> {
|
||||
|
||||
#[inline]
|
||||
fn index_mut(self, slice: &mut [T]) -> &mut [T] {
|
||||
if *self.end() == usize::max_value() {
|
||||
if *self.end() == usize::MAX {
|
||||
slice_index_overflow_fail();
|
||||
}
|
||||
(*self.start()..self.end() + 1).index_mut(slice)
|
||||
|
@ -1651,7 +1651,7 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
|
||||
// Ascii case, try to skip forward quickly.
|
||||
// When the pointer is aligned, read 2 words of data per iteration
|
||||
// until we find a word containing a non-ascii byte.
|
||||
if align != usize::max_value() && align.wrapping_sub(index) % usize_bytes == 0 {
|
||||
if align != usize::MAX && align.wrapping_sub(index) % usize_bytes == 0 {
|
||||
let ptr = v.as_ptr();
|
||||
while index < blocks_end {
|
||||
// SAFETY: since `align - index` and `ascii_block_size` are
|
||||
@ -2083,7 +2083,7 @@ mod traits {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn get(self, slice: &str) -> Option<&Self::Output> {
|
||||
if *self.end() == usize::max_value() {
|
||||
if *self.end() == usize::MAX {
|
||||
None
|
||||
} else {
|
||||
(*self.start()..self.end() + 1).get(slice)
|
||||
@ -2091,7 +2091,7 @@ mod traits {
|
||||
}
|
||||
#[inline]
|
||||
fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> {
|
||||
if *self.end() == usize::max_value() {
|
||||
if *self.end() == usize::MAX {
|
||||
None
|
||||
} else {
|
||||
(*self.start()..self.end() + 1).get_mut(slice)
|
||||
@ -2107,14 +2107,14 @@ mod traits {
|
||||
}
|
||||
#[inline]
|
||||
fn index(self, slice: &str) -> &Self::Output {
|
||||
if *self.end() == usize::max_value() {
|
||||
if *self.end() == usize::MAX {
|
||||
str_index_overflow_fail();
|
||||
}
|
||||
(*self.start()..self.end() + 1).index(slice)
|
||||
}
|
||||
#[inline]
|
||||
fn index_mut(self, slice: &mut str) -> &mut Self::Output {
|
||||
if *self.end() == usize::max_value() {
|
||||
if *self.end() == usize::MAX {
|
||||
str_index_overflow_fail();
|
||||
}
|
||||
(*self.start()..self.end() + 1).index_mut(slice)
|
||||
@ -2140,11 +2140,11 @@ mod traits {
|
||||
type Output = str;
|
||||
#[inline]
|
||||
fn get(self, slice: &str) -> Option<&Self::Output> {
|
||||
if self.end == usize::max_value() { None } else { (..self.end + 1).get(slice) }
|
||||
if self.end == usize::MAX { None } else { (..self.end + 1).get(slice) }
|
||||
}
|
||||
#[inline]
|
||||
fn get_mut(self, slice: &mut str) -> Option<&mut Self::Output> {
|
||||
if self.end == usize::max_value() { None } else { (..self.end + 1).get_mut(slice) }
|
||||
if self.end == usize::MAX { None } else { (..self.end + 1).get_mut(slice) }
|
||||
}
|
||||
#[inline]
|
||||
unsafe fn get_unchecked(self, slice: &str) -> &Self::Output {
|
||||
@ -2156,14 +2156,14 @@ mod traits {
|
||||
}
|
||||
#[inline]
|
||||
fn index(self, slice: &str) -> &Self::Output {
|
||||
if self.end == usize::max_value() {
|
||||
if self.end == usize::MAX {
|
||||
str_index_overflow_fail();
|
||||
}
|
||||
(..self.end + 1).index(slice)
|
||||
}
|
||||
#[inline]
|
||||
fn index_mut(self, slice: &mut str) -> &mut Self::Output {
|
||||
if self.end == usize::max_value() {
|
||||
if self.end == usize::MAX {
|
||||
str_index_overflow_fail();
|
||||
}
|
||||
(..self.end + 1).index_mut(slice)
|
||||
|
@ -60,6 +60,43 @@ use crate::slice::memchr;
|
||||
/// The trait itself acts as a builder for an associated
|
||||
/// `Searcher` type, which does the actual work of finding
|
||||
/// occurrences of the pattern in a string.
|
||||
///
|
||||
/// Depending on the type of the pattern, the behaviour of methods like
|
||||
/// [`str::find`] and [`str::contains`] can change. The table below describes
|
||||
/// some of those behaviours.
|
||||
///
|
||||
/// | Pattern type | Match condition |
|
||||
/// |--------------------------|-------------------------------------------|
|
||||
/// | `&str` | is substring |
|
||||
/// | `char` | is contained in string |
|
||||
/// | `&[char]` | any char in slice is contained in string |
|
||||
/// | `F: FnMut(char) -> bool` | `F` returns `true` for a char in string |
|
||||
/// | `&&str` | is substring |
|
||||
/// | `&String` | is substring |
|
||||
///
|
||||
/// # Examples
|
||||
/// ```
|
||||
/// // &str
|
||||
/// assert_eq!("abaaa".find("ba"), Some(1));
|
||||
/// assert_eq!("abaaa".find("bac"), None);
|
||||
///
|
||||
/// // char
|
||||
/// assert_eq!("abaaa".find('a'), Some(0));
|
||||
/// assert_eq!("abaaa".find('b'), Some(1));
|
||||
/// assert_eq!("abaaa".find('c'), None);
|
||||
///
|
||||
/// // &[char]
|
||||
/// assert_eq!("ab".find(&['b', 'a'][..]), Some(0));
|
||||
/// assert_eq!("abaaa".find(&['a', 'z'][..]), Some(0));
|
||||
/// assert_eq!("abaaa".find(&['c', 'd'][..]), None);
|
||||
///
|
||||
/// // FnMut(char) -> bool
|
||||
/// assert_eq!("abcdef_z".find(|ch| ch > 'd' && ch < 'y'), Some(4));
|
||||
/// assert_eq!("abcddd_z".find(|ch| ch > 'd' && ch < 'y'), None);
|
||||
/// ```
|
||||
///
|
||||
/// [`str::find`]: ../../../std/primitive.str.html#method.find
|
||||
/// [`str::contains`]: ../../../std/primitive.str.html#method.contains
|
||||
pub trait Pattern<'a>: Sized {
|
||||
/// Associated searcher for this pattern
|
||||
type Searcher: Searcher<'a>;
|
||||
@ -80,6 +117,15 @@ pub trait Pattern<'a>: Sized {
|
||||
matches!(self.into_searcher(haystack).next(), SearchStep::Match(0, _))
|
||||
}
|
||||
|
||||
/// Checks whether the pattern matches at the back of the haystack
|
||||
#[inline]
|
||||
fn is_suffix_of(self, haystack: &'a str) -> bool
|
||||
where
|
||||
Self::Searcher: ReverseSearcher<'a>,
|
||||
{
|
||||
matches!(self.into_searcher(haystack).next_back(), SearchStep::Match(_, j) if haystack.len() == j)
|
||||
}
|
||||
|
||||
/// Removes the pattern from the front of haystack, if it matches.
|
||||
#[inline]
|
||||
fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> {
|
||||
@ -96,15 +142,6 @@ pub trait Pattern<'a>: Sized {
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks whether the pattern matches at the back of the haystack
|
||||
#[inline]
|
||||
fn is_suffix_of(self, haystack: &'a str) -> bool
|
||||
where
|
||||
Self::Searcher: ReverseSearcher<'a>,
|
||||
{
|
||||
matches!(self.into_searcher(haystack).next_back(), SearchStep::Match(_, j) if haystack.len() == j)
|
||||
}
|
||||
|
||||
/// Removes the pattern from the back of haystack, if it matches.
|
||||
#[inline]
|
||||
fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str>
|
||||
|
@ -2623,15 +2623,7 @@ unsafe fn atomic_umin<T: Copy>(dst: *mut T, val: T, order: Ordering) -> T {
|
||||
/// [`Relaxed`]: enum.Ordering.html#variant.Relaxed
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[cfg_attr(target_arch = "wasm32", allow(unused_variables))]
|
||||
pub fn fence(order: Ordering) {
|
||||
// On wasm32 it looks like fences aren't implemented in LLVM yet in that
|
||||
// they will cause LLVM to abort. The wasm instruction set doesn't have
|
||||
// fences right now. There's discussion online about the best way for tools
|
||||
// to conventionally implement fences at
|
||||
// https://github.com/WebAssembly/tool-conventions/issues/59. We should
|
||||
// follow that discussion and implement a solution when one comes about!
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
// SAFETY: using an atomic fence is safe.
|
||||
unsafe {
|
||||
match order {
|
||||
|
@ -140,8 +140,8 @@ macro_rules! test_impl_from {
|
||||
($fn_name: ident, $Small: ty, $Large: ty) => {
|
||||
#[test]
|
||||
fn $fn_name() {
|
||||
let small_max = <$Small>::max_value();
|
||||
let small_min = <$Small>::min_value();
|
||||
let small_max = <$Small>::MAX;
|
||||
let small_min = <$Small>::MIN;
|
||||
let large_max: $Large = small_max.into();
|
||||
let large_min: $Large = small_min.into();
|
||||
assert_eq!(large_max as $Small, small_max);
|
||||
@ -248,8 +248,8 @@ macro_rules! test_impl_try_from_always_ok {
|
||||
($fn_name:ident, $source:ty, $target: ty) => {
|
||||
#[test]
|
||||
fn $fn_name() {
|
||||
let max = <$source>::max_value();
|
||||
let min = <$source>::min_value();
|
||||
let max = <$source>::MAX;
|
||||
let min = <$source>::MIN;
|
||||
let zero: $source = 0;
|
||||
assert_eq!(<$target as TryFrom<$source>>::try_from(max).unwrap(), max as $target);
|
||||
assert_eq!(<$target as TryFrom<$source>>::try_from(min).unwrap(), min as $target);
|
||||
@ -361,8 +361,8 @@ macro_rules! test_impl_try_from_signed_to_unsigned_upper_ok {
|
||||
($fn_name:ident, $source:ty, $target:ty) => {
|
||||
#[test]
|
||||
fn $fn_name() {
|
||||
let max = <$source>::max_value();
|
||||
let min = <$source>::min_value();
|
||||
let max = <$source>::MAX;
|
||||
let min = <$source>::MIN;
|
||||
let zero: $source = 0;
|
||||
let neg_one: $source = -1;
|
||||
assert_eq!(<$target as TryFrom<$source>>::try_from(max).unwrap(), max as $target);
|
||||
@ -426,8 +426,8 @@ macro_rules! test_impl_try_from_unsigned_to_signed_upper_err {
|
||||
($fn_name:ident, $source:ty, $target:ty) => {
|
||||
#[test]
|
||||
fn $fn_name() {
|
||||
let max = <$source>::max_value();
|
||||
let min = <$source>::min_value();
|
||||
let max = <$source>::MAX;
|
||||
let min = <$source>::MIN;
|
||||
let zero: $source = 0;
|
||||
assert!(<$target as TryFrom<$source>>::try_from(max).is_err());
|
||||
assert_eq!(<$target as TryFrom<$source>>::try_from(min).unwrap(), min as $target);
|
||||
@ -487,11 +487,11 @@ macro_rules! test_impl_try_from_same_sign_err {
|
||||
($fn_name:ident, $source:ty, $target:ty) => {
|
||||
#[test]
|
||||
fn $fn_name() {
|
||||
let max = <$source>::max_value();
|
||||
let min = <$source>::min_value();
|
||||
let max = <$source>::MAX;
|
||||
let min = <$source>::MIN;
|
||||
let zero: $source = 0;
|
||||
let t_max = <$target>::max_value();
|
||||
let t_min = <$target>::min_value();
|
||||
let t_max = <$target>::MAX;
|
||||
let t_min = <$target>::MIN;
|
||||
assert!(<$target as TryFrom<$source>>::try_from(max).is_err());
|
||||
if min != 0 {
|
||||
assert!(<$target as TryFrom<$source>>::try_from(min).is_err());
|
||||
@ -576,11 +576,11 @@ macro_rules! test_impl_try_from_signed_to_unsigned_err {
|
||||
($fn_name:ident, $source:ty, $target:ty) => {
|
||||
#[test]
|
||||
fn $fn_name() {
|
||||
let max = <$source>::max_value();
|
||||
let min = <$source>::min_value();
|
||||
let max = <$source>::MAX;
|
||||
let min = <$source>::MIN;
|
||||
let zero: $source = 0;
|
||||
let t_max = <$target>::max_value();
|
||||
let t_min = <$target>::min_value();
|
||||
let t_max = <$target>::MAX;
|
||||
let t_min = <$target>::MIN;
|
||||
assert!(<$target as TryFrom<$source>>::try_from(max).is_err());
|
||||
assert!(<$target as TryFrom<$source>>::try_from(min).is_err());
|
||||
assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), zero as $target);
|
||||
|
@ -357,7 +357,7 @@ fn align_offset_weird_strides() {
|
||||
|
||||
unsafe fn test_weird_stride<T>(ptr: *const T, align: usize) -> bool {
|
||||
let numptr = ptr as usize;
|
||||
let mut expected = usize::max_value();
|
||||
let mut expected = usize::MAX;
|
||||
// Naive but definitely correct way to find the *first* aligned element of stride::<T>.
|
||||
for el in 0..align {
|
||||
if (numptr + el * ::std::mem::size_of::<T>()) % align == 0 {
|
||||
|
@ -1691,8 +1691,8 @@ fn test_copy_within_panics_src_inverted() {
|
||||
#[should_panic(expected = "attempted to index slice up to maximum usize")]
|
||||
fn test_copy_within_panics_src_out_of_bounds() {
|
||||
let mut bytes = *b"Hello, World!";
|
||||
// an inclusive range ending at usize::max_value() would make src_end overflow
|
||||
bytes.copy_within(usize::max_value()..=usize::max_value(), 0);
|
||||
// an inclusive range ending at usize::MAX would make src_end overflow
|
||||
bytes.copy_within(usize::MAX..=usize::MAX, 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -133,9 +133,9 @@ impl Neg for Round {
|
||||
pub type ExpInt = i16;
|
||||
|
||||
// \c ilogb error results.
|
||||
pub const IEK_INF: ExpInt = ExpInt::max_value();
|
||||
pub const IEK_NAN: ExpInt = ExpInt::min_value();
|
||||
pub const IEK_ZERO: ExpInt = ExpInt::min_value() + 1;
|
||||
pub const IEK_INF: ExpInt = ExpInt::MAX;
|
||||
pub const IEK_NAN: ExpInt = ExpInt::MIN;
|
||||
pub const IEK_ZERO: ExpInt = ExpInt::MIN + 1;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
|
||||
pub struct ParseError(pub &'static str);
|
||||
|
@ -2997,8 +2997,8 @@ fn scalbn() {
|
||||
assert!(smallest_f64.scalbn(2099).is_infinite());
|
||||
|
||||
// Test for integer overflows when adding to exponent.
|
||||
assert!(smallest_f64.scalbn(-ExpInt::max_value()).is_pos_zero());
|
||||
assert!(largest_f64.scalbn(ExpInt::max_value()).is_infinite());
|
||||
assert!(smallest_f64.scalbn(-ExpInt::MAX).is_pos_zero());
|
||||
assert!(largest_f64.scalbn(ExpInt::MAX).is_infinite());
|
||||
|
||||
assert!(largest_denormal_f64.bitwise_eq(largest_denormal_f64.scalbn(0),));
|
||||
assert!(neg_largest_denormal_f64.bitwise_eq(neg_largest_denormal_f64.scalbn(0),));
|
||||
|
@ -602,7 +602,7 @@ macro_rules! which_arena_for_type {
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! declare_arena {
|
||||
([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
|
||||
([], [$($a:tt $name:ident: $ty:ty, $gen_ty:ty;)*], $tcx:lifetime) => {
|
||||
#[derive(Default)]
|
||||
pub struct Arena<$tcx> {
|
||||
pub dropless: $crate::DroplessArena,
|
||||
@ -611,17 +611,17 @@ macro_rules! declare_arena {
|
||||
}
|
||||
|
||||
#[marker]
|
||||
pub trait ArenaAllocatable {}
|
||||
pub trait ArenaAllocatable<'tcx> {}
|
||||
|
||||
impl<T: Copy> ArenaAllocatable for T {}
|
||||
impl<'tcx, T: Copy> ArenaAllocatable<'tcx> for T {}
|
||||
|
||||
unsafe trait ArenaField<'tcx>: Sized {
|
||||
unsafe trait ArenaField<'tcx>: Sized + ArenaAllocatable<'tcx> {
|
||||
/// Returns a specific arena to allocate from.
|
||||
/// If `None` is returned, the `DropArena` will be used.
|
||||
fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>>;
|
||||
}
|
||||
|
||||
unsafe impl<'tcx, T> ArenaField<'tcx> for T {
|
||||
unsafe impl<'tcx, T: ArenaAllocatable<'tcx>> ArenaField<'tcx> for T {
|
||||
#[inline]
|
||||
default fn arena<'a>(_: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>> {
|
||||
panic!()
|
||||
@ -630,18 +630,27 @@ macro_rules! declare_arena {
|
||||
|
||||
$(
|
||||
#[allow(unused_lifetimes)]
|
||||
impl<$tcx> ArenaAllocatable for $ty {}
|
||||
unsafe impl<$tcx> ArenaField<$tcx> for $ty {
|
||||
impl<$tcx> ArenaAllocatable<$tcx> for $ty {}
|
||||
unsafe impl<$tcx, '_x, '_y, '_z, '_w> ArenaField<$tcx> for $gen_ty where Self: ArenaAllocatable<$tcx> {
|
||||
#[inline]
|
||||
fn arena<'a>(_arena: &'a Arena<$tcx>) -> Option<&'a $crate::TypedArena<Self>> {
|
||||
$crate::which_arena_for_type!($a[&_arena.$name])
|
||||
// SAFETY: We only implement `ArenaAllocatable<$tcx>` for
|
||||
// `$ty`, so `$ty` and Self are the same type
|
||||
unsafe {
|
||||
::std::mem::transmute::<
|
||||
Option<&'a $crate::TypedArena<$ty>>,
|
||||
Option<&'a $crate::TypedArena<Self>>,
|
||||
>(
|
||||
$crate::which_arena_for_type!($a[&_arena.$name])
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
)*
|
||||
|
||||
impl<'tcx> Arena<'tcx> {
|
||||
#[inline]
|
||||
pub fn alloc<T: ArenaAllocatable>(&self, value: T) -> &mut T {
|
||||
pub fn alloc<T: ArenaAllocatable<'tcx>>(&self, value: T) -> &mut T {
|
||||
if !::std::mem::needs_drop::<T>() {
|
||||
return self.dropless.alloc(value);
|
||||
}
|
||||
@ -659,7 +668,7 @@ macro_rules! declare_arena {
|
||||
self.dropless.alloc_slice(value)
|
||||
}
|
||||
|
||||
pub fn alloc_from_iter<'a, T: ArenaAllocatable>(
|
||||
pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx>>(
|
||||
&'a self,
|
||||
iter: impl ::std::iter::IntoIterator<Item = T>,
|
||||
) -> &'a mut [T] {
|
||||
|
@ -362,7 +362,11 @@ impl Default for Generics {
|
||||
fn default() -> Generics {
|
||||
Generics {
|
||||
params: Vec::new(),
|
||||
where_clause: WhereClause { predicates: Vec::new(), span: DUMMY_SP },
|
||||
where_clause: WhereClause {
|
||||
has_where_token: false,
|
||||
predicates: Vec::new(),
|
||||
span: DUMMY_SP,
|
||||
},
|
||||
span: DUMMY_SP,
|
||||
}
|
||||
}
|
||||
@ -371,6 +375,11 @@ impl Default for Generics {
|
||||
/// A where-clause in a definition.
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
|
||||
pub struct WhereClause {
|
||||
/// `true` if we ate a `where` token: this can happen
|
||||
/// if we parsed no predicates (e.g. `struct Foo where {}
|
||||
/// This allows us to accurately pretty-print
|
||||
/// in `nt_to_tokenstream`
|
||||
pub has_where_token: bool,
|
||||
pub predicates: Vec<WherePredicate>,
|
||||
pub span: Span,
|
||||
}
|
||||
@ -1165,7 +1174,9 @@ pub enum ExprKind {
|
||||
/// and the remaining elements are the rest of the arguments.
|
||||
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
|
||||
/// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`.
|
||||
MethodCall(PathSegment, Vec<P<Expr>>),
|
||||
/// This `Span` is the span of the function, without the dot and receiver
|
||||
/// (e.g. `foo(a, b)` in `x.foo(a, b)`
|
||||
MethodCall(PathSegment, Vec<P<Expr>>, Span),
|
||||
/// A tuple (e.g., `(a, b, c, d)`).
|
||||
Tup(Vec<P<Expr>>),
|
||||
/// A binary operation (e.g., `a + b`, `a * b`).
|
||||
@ -1849,15 +1860,6 @@ impl TyKind {
|
||||
pub fn is_unit(&self) -> bool {
|
||||
if let TyKind::Tup(ref tys) = *self { tys.is_empty() } else { false }
|
||||
}
|
||||
|
||||
/// HACK(type_alias_impl_trait, Centril): A temporary crutch used
|
||||
/// in lowering to avoid making larger changes there and beyond.
|
||||
pub fn opaque_top_hack(&self) -> Option<&GenericBounds> {
|
||||
match self {
|
||||
Self::ImplTrait(_, bounds) => Some(bounds),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Syntax used to declare a trait object.
|
||||
|
@ -786,7 +786,7 @@ pub fn noop_visit_generics<T: MutVisitor>(generics: &mut Generics, vis: &mut T)
|
||||
}
|
||||
|
||||
pub fn noop_visit_where_clause<T: MutVisitor>(wc: &mut WhereClause, vis: &mut T) {
|
||||
let WhereClause { predicates, span } = wc;
|
||||
let WhereClause { has_where_token: _, predicates, span } = wc;
|
||||
visit_vec(predicates, |predicate| vis.visit_where_predicate(predicate));
|
||||
vis.visit_span(span);
|
||||
}
|
||||
@ -1111,11 +1111,12 @@ pub fn noop_visit_expr<T: MutVisitor>(
|
||||
vis.visit_expr(f);
|
||||
visit_exprs(args, vis);
|
||||
}
|
||||
ExprKind::MethodCall(PathSegment { ident, id, args }, exprs) => {
|
||||
ExprKind::MethodCall(PathSegment { ident, id, args }, exprs, span) => {
|
||||
vis.visit_ident(ident);
|
||||
vis.visit_id(id);
|
||||
visit_opt(args, |args| vis.visit_generic_args(args));
|
||||
visit_exprs(exprs, vis);
|
||||
vis.visit_span(span);
|
||||
}
|
||||
ExprKind::Binary(_binop, lhs, rhs) => {
|
||||
vis.visit_expr(lhs);
|
||||
|
@ -392,7 +392,7 @@ impl TokenStream {
|
||||
break;
|
||||
}
|
||||
}
|
||||
token_trees = out.into_iter().map(|t| TokenTree::Token(t)).collect();
|
||||
token_trees = out.into_iter().map(TokenTree::Token).collect();
|
||||
if token_trees.len() != 1 {
|
||||
debug!("break_tokens: broke {:?} to {:?}", tree, token_trees);
|
||||
}
|
||||
|
@ -394,7 +394,7 @@ pub fn contains_exterior_struct_lit(value: &ast::Expr) -> bool {
|
||||
contains_exterior_struct_lit(&x)
|
||||
}
|
||||
|
||||
ast::ExprKind::MethodCall(.., ref exprs) => {
|
||||
ast::ExprKind::MethodCall(.., ref exprs, _) => {
|
||||
// X { y: 1 }.bar(...)
|
||||
contains_exterior_struct_lit(&exprs[0])
|
||||
}
|
||||
|
@ -726,7 +726,7 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
|
||||
visitor.visit_expr(callee_expression);
|
||||
walk_list!(visitor, visit_expr, arguments);
|
||||
}
|
||||
ExprKind::MethodCall(ref segment, ref arguments) => {
|
||||
ExprKind::MethodCall(ref segment, ref arguments, _span) => {
|
||||
visitor.visit_path_segment(expression.span, segment);
|
||||
walk_list!(visitor, visit_expr, arguments);
|
||||
}
|
||||
|
@ -9,7 +9,7 @@ use rustc_data_structures::thin_vec::ThinVec;
|
||||
use rustc_errors::struct_span_err;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::Res;
|
||||
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
|
||||
use rustc_span::source_map::{respan, DesugaringKind, ForLoopLoc, Span, Spanned};
|
||||
use rustc_span::symbol::{sym, Ident, Symbol};
|
||||
use rustc_target::asm;
|
||||
use std::collections::hash_map::Entry;
|
||||
@ -25,6 +25,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
|
||||
pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
|
||||
let mut span = e.span;
|
||||
ensure_sufficient_stack(|| {
|
||||
let kind = match e.kind {
|
||||
ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
|
||||
@ -39,7 +40,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let f = self.lower_expr(f);
|
||||
hir::ExprKind::Call(f, self.lower_exprs(args))
|
||||
}
|
||||
ExprKind::MethodCall(ref seg, ref args) => {
|
||||
ExprKind::MethodCall(ref seg, ref args, span) => {
|
||||
let hir_seg = self.arena.alloc(self.lower_path_segment(
|
||||
e.span,
|
||||
seg,
|
||||
@ -50,9 +51,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
None,
|
||||
));
|
||||
let args = self.lower_exprs(args);
|
||||
hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args)
|
||||
hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
|
||||
}
|
||||
ExprKind::Binary(binop, ref lhs, ref rhs) => {
|
||||
span = self.mark_span_with_reason(DesugaringKind::Operator, e.span, None);
|
||||
let binop = self.lower_binop(binop);
|
||||
let lhs = self.lower_expr(lhs);
|
||||
let rhs = self.lower_expr(rhs);
|
||||
@ -222,7 +224,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
hir::Expr {
|
||||
hir_id: self.lower_node_id(e.id),
|
||||
kind,
|
||||
span: e.span,
|
||||
span,
|
||||
attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
|
||||
}
|
||||
})
|
||||
@ -237,6 +239,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
|
||||
fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
|
||||
let span = self.mark_span_with_reason(DesugaringKind::Operator, b.span, None);
|
||||
Spanned {
|
||||
node: match b.node {
|
||||
BinOpKind::Add => hir::BinOpKind::Add,
|
||||
@ -258,7 +261,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
BinOpKind::Ge => hir::BinOpKind::Ge,
|
||||
BinOpKind::Gt => hir::BinOpKind::Gt,
|
||||
},
|
||||
span: b.span,
|
||||
span,
|
||||
}
|
||||
}
|
||||
|
||||
@ -1237,10 +1240,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
) => {
|
||||
assert!(!*late);
|
||||
let out_op_sp = if input { op_sp2 } else { op_sp };
|
||||
let msg = &format!(
|
||||
"use `lateout` instead of \
|
||||
`out` to avoid conflict"
|
||||
);
|
||||
let msg = "use `lateout` instead of \
|
||||
`out` to avoid conflict";
|
||||
err.span_help(out_op_sp, msg);
|
||||
}
|
||||
_ => {}
|
||||
@ -1362,9 +1363,14 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
body: &Block,
|
||||
opt_label: Option<Label>,
|
||||
) -> hir::Expr<'hir> {
|
||||
let orig_head_span = head.span;
|
||||
// expand <head>
|
||||
let mut head = self.lower_expr_mut(head);
|
||||
let desugared_span = self.mark_span_with_reason(DesugaringKind::ForLoop, head.span, None);
|
||||
let desugared_span = self.mark_span_with_reason(
|
||||
DesugaringKind::ForLoop(ForLoopLoc::Head),
|
||||
orig_head_span,
|
||||
None,
|
||||
);
|
||||
head.span = desugared_span;
|
||||
|
||||
let iter = Ident::with_dummy_span(sym::iter);
|
||||
@ -1459,10 +1465,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
// `mut iter => { ... }`
|
||||
let iter_arm = self.arm(iter_pat, loop_expr);
|
||||
|
||||
let into_iter_span = self.mark_span_with_reason(
|
||||
DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
|
||||
orig_head_span,
|
||||
None,
|
||||
);
|
||||
|
||||
// `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
|
||||
let into_iter_expr = {
|
||||
let into_iter_path = &[sym::iter, sym::IntoIterator, sym::into_iter];
|
||||
self.expr_call_std_path(desugared_span, into_iter_path, arena_vec![self; head])
|
||||
self.expr_call_std_path(into_iter_span, into_iter_path, arena_vec![self; head])
|
||||
};
|
||||
|
||||
let match_expr = self.arena.alloc(self.expr_match(
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{AnonymousLifetimeMode, LoweringContext, ParamMode};
|
||||
use super::{ImplTraitContext, ImplTraitPosition, ImplTraitTypeIdVisitor};
|
||||
use super::{ImplTraitContext, ImplTraitPosition};
|
||||
use crate::Arena;
|
||||
|
||||
use rustc_ast::ast::*;
|
||||
@ -7,6 +7,7 @@ use rustc_ast::attr;
|
||||
use rustc_ast::node_id::NodeMap;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::visit::{self, AssocCtxt, Visitor};
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
use rustc_errors::struct_span_err;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
@ -165,13 +166,6 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
ItemKind::MacroDef(..) => SmallVec::new(),
|
||||
ItemKind::Fn(..) | ItemKind::Impl { of_trait: None, .. } => smallvec![i.id],
|
||||
ItemKind::Static(ref ty, ..) | ItemKind::Const(_, ref ty, ..) => {
|
||||
let mut ids = smallvec![i.id];
|
||||
if self.sess.features_untracked().impl_trait_in_bindings {
|
||||
ImplTraitTypeIdVisitor { ids: &mut ids }.visit_ty(ty);
|
||||
}
|
||||
ids
|
||||
}
|
||||
_ => smallvec![i.id],
|
||||
};
|
||||
|
||||
@ -292,23 +286,25 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
ItemKind::Mod(ref m) => hir::ItemKind::Mod(self.lower_mod(m)),
|
||||
ItemKind::ForeignMod(ref nm) => hir::ItemKind::ForeignMod(self.lower_foreign_mod(nm)),
|
||||
ItemKind::GlobalAsm(ref ga) => hir::ItemKind::GlobalAsm(self.lower_global_asm(ga)),
|
||||
ItemKind::TyAlias(_, ref gen, _, Some(ref ty)) => match ty.kind.opaque_top_hack() {
|
||||
None => {
|
||||
let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
|
||||
let generics = self.lower_generics(gen, ImplTraitContext::disallowed());
|
||||
hir::ItemKind::TyAlias(ty, generics)
|
||||
}
|
||||
Some(bounds) => {
|
||||
let ctx = || ImplTraitContext::OpaqueTy(None, hir::OpaqueTyOrigin::Misc);
|
||||
let ty = hir::OpaqueTy {
|
||||
generics: self.lower_generics(gen, ctx()),
|
||||
bounds: self.lower_param_bounds(bounds, ctx()),
|
||||
impl_trait_fn: None,
|
||||
origin: hir::OpaqueTyOrigin::TypeAlias,
|
||||
};
|
||||
hir::ItemKind::OpaqueTy(ty)
|
||||
}
|
||||
},
|
||||
ItemKind::TyAlias(_, ref gen, _, Some(ref ty)) => {
|
||||
// We lower
|
||||
//
|
||||
// type Foo = impl Trait
|
||||
//
|
||||
// to
|
||||
//
|
||||
// type Foo = Foo1
|
||||
// opaque type Foo1: Trait
|
||||
let ty = self.lower_ty(
|
||||
ty,
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut FxHashSet::default(),
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
},
|
||||
);
|
||||
let generics = self.lower_generics(gen, ImplTraitContext::disallowed());
|
||||
hir::ItemKind::TyAlias(ty, generics)
|
||||
}
|
||||
ItemKind::TyAlias(_, ref generics, _, None) => {
|
||||
let ty = self.arena.alloc(self.ty(span, hir::TyKind::Err));
|
||||
let generics = self.lower_generics(generics, ImplTraitContext::disallowed());
|
||||
@ -438,8 +434,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
span: Span,
|
||||
body: Option<&Expr>,
|
||||
) -> (&'hir hir::Ty<'hir>, hir::BodyId) {
|
||||
let mut capturable_lifetimes;
|
||||
let itctx = if self.sess.features_untracked().impl_trait_in_bindings {
|
||||
ImplTraitContext::OpaqueTy(None, hir::OpaqueTyOrigin::Misc)
|
||||
capturable_lifetimes = FxHashSet::default();
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut capturable_lifetimes,
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
}
|
||||
} else {
|
||||
ImplTraitContext::Disallowed(ImplTraitPosition::Binding)
|
||||
};
|
||||
@ -844,16 +845,16 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let ty = self.arena.alloc(self.ty(i.span, hir::TyKind::Err));
|
||||
hir::ImplItemKind::TyAlias(ty)
|
||||
}
|
||||
Some(ty) => match ty.kind.opaque_top_hack() {
|
||||
None => {
|
||||
let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
|
||||
hir::ImplItemKind::TyAlias(ty)
|
||||
}
|
||||
Some(bs) => {
|
||||
let bs = self.lower_param_bounds(bs, ImplTraitContext::disallowed());
|
||||
hir::ImplItemKind::OpaqueTy(bs)
|
||||
}
|
||||
},
|
||||
Some(ty) => {
|
||||
let ty = self.lower_ty(
|
||||
ty,
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut FxHashSet::default(),
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
},
|
||||
);
|
||||
hir::ImplItemKind::TyAlias(ty)
|
||||
}
|
||||
};
|
||||
(generics, kind)
|
||||
}
|
||||
@ -887,12 +888,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
defaultness,
|
||||
kind: match &i.kind {
|
||||
AssocItemKind::Const(..) => hir::AssocItemKind::Const,
|
||||
AssocItemKind::TyAlias(.., ty) => {
|
||||
match ty.as_deref().and_then(|ty| ty.kind.opaque_top_hack()) {
|
||||
None => hir::AssocItemKind::Type,
|
||||
Some(_) => hir::AssocItemKind::OpaqueTy,
|
||||
}
|
||||
}
|
||||
AssocItemKind::TyAlias(..) => hir::AssocItemKind::Type,
|
||||
AssocItemKind::Fn(_, sig, ..) => {
|
||||
hir::AssocItemKind::Fn { has_self: sig.decl.has_self() }
|
||||
}
|
||||
|
@ -33,7 +33,7 @@
|
||||
#![feature(array_value_iter)]
|
||||
#![feature(crate_visibility_modifier)]
|
||||
#![feature(marker_trait_attr)]
|
||||
#![feature(specialization)] // FIXME: min_specialization does not work
|
||||
#![feature(min_specialization)]
|
||||
#![feature(or_patterns)]
|
||||
#![recursion_limit = "256"]
|
||||
|
||||
@ -224,11 +224,30 @@ enum ImplTraitContext<'b, 'a> {
|
||||
/// Example: `fn foo() -> impl Debug`, where `impl Debug` is conceptually
|
||||
/// equivalent to a new opaque type like `type T = impl Debug; fn foo() -> T`.
|
||||
///
|
||||
/// We optionally store a `DefId` for the parent item here so we can look up necessary
|
||||
/// information later. It is `None` when no information about the context should be stored
|
||||
/// (e.g., for consts and statics).
|
||||
OpaqueTy(Option<DefId> /* fn def-ID */, hir::OpaqueTyOrigin),
|
||||
|
||||
ReturnPositionOpaqueTy {
|
||||
/// `DefId` for the parent function, used to look up necessary
|
||||
/// information later.
|
||||
fn_def_id: DefId,
|
||||
/// Origin: Either OpaqueTyOrigin::FnReturn or OpaqueTyOrigin::AsyncFn,
|
||||
origin: hir::OpaqueTyOrigin,
|
||||
},
|
||||
/// Impl trait in type aliases, consts and statics.
|
||||
OtherOpaqueTy {
|
||||
/// Set of lifetimes that this opaque type can capture, if it uses
|
||||
/// them. This includes lifetimes bound since we entered this context.
|
||||
/// For example, in
|
||||
///
|
||||
/// type A<'b> = impl for<'a> Trait<'a, Out = impl Sized + 'a>;
|
||||
///
|
||||
/// the inner opaque type captures `'a` because it uses it. It doesn't
|
||||
/// need to capture `'b` because it already inherits the lifetime
|
||||
/// parameter from `A`.
|
||||
// FIXME(impl_trait): but `required_region_bounds` will ICE later
|
||||
// anyway.
|
||||
capturable_lifetimes: &'b mut FxHashSet<hir::LifetimeName>,
|
||||
/// Origin: Either OpaqueTyOrigin::Misc or OpaqueTyOrigin::Binding,
|
||||
origin: hir::OpaqueTyOrigin,
|
||||
},
|
||||
/// `impl Trait` is not accepted in this position.
|
||||
Disallowed(ImplTraitPosition),
|
||||
}
|
||||
@ -253,7 +272,12 @@ impl<'a> ImplTraitContext<'_, 'a> {
|
||||
use self::ImplTraitContext::*;
|
||||
match self {
|
||||
Universal(params) => Universal(params),
|
||||
OpaqueTy(fn_def_id, origin) => OpaqueTy(*fn_def_id, *origin),
|
||||
ReturnPositionOpaqueTy { fn_def_id, origin } => {
|
||||
ReturnPositionOpaqueTy { fn_def_id: *fn_def_id, origin: *origin }
|
||||
}
|
||||
OtherOpaqueTy { capturable_lifetimes, origin } => {
|
||||
OtherOpaqueTy { capturable_lifetimes, origin: *origin }
|
||||
}
|
||||
Disallowed(pos) => Disallowed(*pos),
|
||||
}
|
||||
}
|
||||
@ -1001,6 +1025,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
hir::TypeBindingKind::Equality { ty: self.lower_ty(ty, itctx) }
|
||||
}
|
||||
AssocTyConstraintKind::Bound { ref bounds } => {
|
||||
let mut capturable_lifetimes;
|
||||
// Piggy-back on the `impl Trait` context to figure out the correct behavior.
|
||||
let (desugar_to_impl_trait, itctx) = match itctx {
|
||||
// We are in the return position:
|
||||
@ -1010,7 +1035,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
// so desugar to
|
||||
//
|
||||
// fn foo() -> impl Iterator<Item = impl Debug>
|
||||
ImplTraitContext::OpaqueTy(..) => (true, itctx),
|
||||
ImplTraitContext::ReturnPositionOpaqueTy { .. }
|
||||
| ImplTraitContext::OtherOpaqueTy { .. } => (true, itctx),
|
||||
|
||||
// We are in the argument position, but within a dyn type:
|
||||
//
|
||||
@ -1028,7 +1054,14 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
//
|
||||
// FIXME: this is only needed until `impl Trait` is allowed in type aliases.
|
||||
ImplTraitContext::Disallowed(_) if self.is_in_dyn_type => {
|
||||
(true, ImplTraitContext::OpaqueTy(None, hir::OpaqueTyOrigin::Misc))
|
||||
capturable_lifetimes = FxHashSet::default();
|
||||
(
|
||||
true,
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut capturable_lifetimes,
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
// We are in the parameter position, but not within a dyn type:
|
||||
@ -1270,10 +1303,31 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
TyKind::ImplTrait(def_node_id, ref bounds) => {
|
||||
let span = t.span;
|
||||
match itctx {
|
||||
ImplTraitContext::OpaqueTy(fn_def_id, origin) => {
|
||||
self.lower_opaque_impl_trait(span, fn_def_id, origin, def_node_id, |this| {
|
||||
this.lower_param_bounds(bounds, itctx)
|
||||
})
|
||||
ImplTraitContext::ReturnPositionOpaqueTy { fn_def_id, origin } => self
|
||||
.lower_opaque_impl_trait(
|
||||
span,
|
||||
Some(fn_def_id),
|
||||
origin,
|
||||
def_node_id,
|
||||
None,
|
||||
|this| this.lower_param_bounds(bounds, itctx),
|
||||
),
|
||||
ImplTraitContext::OtherOpaqueTy { ref capturable_lifetimes, origin } => {
|
||||
// Reset capturable lifetimes, any nested impl trait
|
||||
// types will inherit lifetimes from this opaque type,
|
||||
// so don't need to capture them again.
|
||||
let nested_itctx = ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut FxHashSet::default(),
|
||||
origin,
|
||||
};
|
||||
self.lower_opaque_impl_trait(
|
||||
span,
|
||||
None,
|
||||
origin,
|
||||
def_node_id,
|
||||
Some(capturable_lifetimes),
|
||||
|this| this.lower_param_bounds(bounds, nested_itctx),
|
||||
)
|
||||
}
|
||||
ImplTraitContext::Universal(in_band_ty_params) => {
|
||||
// Add a definition for the in-band `Param`.
|
||||
@ -1351,6 +1405,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
fn_def_id: Option<DefId>,
|
||||
origin: hir::OpaqueTyOrigin,
|
||||
opaque_ty_node_id: NodeId,
|
||||
capturable_lifetimes: Option<&FxHashSet<hir::LifetimeName>>,
|
||||
lower_bounds: impl FnOnce(&mut Self) -> hir::GenericBounds<'hir>,
|
||||
) -> hir::TyKind<'hir> {
|
||||
debug!(
|
||||
@ -1371,12 +1426,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
|
||||
let hir_bounds = self.with_hir_id_owner(opaque_ty_node_id, lower_bounds);
|
||||
|
||||
let (lifetimes, lifetime_defs) =
|
||||
self.lifetimes_from_impl_trait_bounds(opaque_ty_node_id, opaque_ty_def_id, &hir_bounds);
|
||||
let (lifetimes, lifetime_defs) = self.lifetimes_from_impl_trait_bounds(
|
||||
opaque_ty_node_id,
|
||||
opaque_ty_def_id,
|
||||
&hir_bounds,
|
||||
capturable_lifetimes,
|
||||
);
|
||||
|
||||
debug!("lower_opaque_impl_trait: lifetimes={:#?}", lifetimes,);
|
||||
debug!("lower_opaque_impl_trait: lifetimes={:#?}", lifetimes);
|
||||
|
||||
debug!("lower_opaque_impl_trait: lifetime_defs={:#?}", lifetime_defs,);
|
||||
debug!("lower_opaque_impl_trait: lifetime_defs={:#?}", lifetime_defs);
|
||||
|
||||
self.with_hir_id_owner(opaque_ty_node_id, move |lctx| {
|
||||
let opaque_ty_item = hir::OpaqueTy {
|
||||
@ -1395,7 +1454,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
lctx.generate_opaque_type(opaque_ty_node_id, opaque_ty_item, span, opaque_ty_span);
|
||||
|
||||
// `impl Trait` now just becomes `Foo<'a, 'b, ..>`.
|
||||
hir::TyKind::Def(hir::ItemId { id: opaque_ty_id }, lifetimes)
|
||||
hir::TyKind::OpaqueDef(hir::ItemId { id: opaque_ty_id }, lifetimes)
|
||||
})
|
||||
}
|
||||
|
||||
@ -1433,6 +1492,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
opaque_ty_id: NodeId,
|
||||
parent_def_id: LocalDefId,
|
||||
bounds: hir::GenericBounds<'hir>,
|
||||
lifetimes_to_include: Option<&FxHashSet<hir::LifetimeName>>,
|
||||
) -> (&'hir [hir::GenericArg<'hir>], &'hir [hir::GenericParam<'hir>]) {
|
||||
debug!(
|
||||
"lifetimes_from_impl_trait_bounds(opaque_ty_id={:?}, \
|
||||
@ -1453,6 +1513,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
already_defined_lifetimes: FxHashSet<hir::LifetimeName>,
|
||||
output_lifetimes: Vec<hir::GenericArg<'hir>>,
|
||||
output_lifetime_params: Vec<hir::GenericParam<'hir>>,
|
||||
lifetimes_to_include: Option<&'r FxHashSet<hir::LifetimeName>>,
|
||||
}
|
||||
|
||||
impl<'r, 'a, 'v, 'hir> intravisit::Visitor<'v> for ImplTraitLifetimeCollector<'r, 'a, 'hir> {
|
||||
@ -1538,6 +1599,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
|
||||
if !self.currently_bound_lifetimes.contains(&name)
|
||||
&& !self.already_defined_lifetimes.contains(&name)
|
||||
&& self.lifetimes_to_include.map_or(true, |lifetimes| lifetimes.contains(&name))
|
||||
{
|
||||
self.already_defined_lifetimes.insert(name);
|
||||
|
||||
@ -1591,6 +1653,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
already_defined_lifetimes: FxHashSet::default(),
|
||||
output_lifetimes: Vec::new(),
|
||||
output_lifetime_params: Vec::new(),
|
||||
lifetimes_to_include,
|
||||
};
|
||||
|
||||
for bound in bounds {
|
||||
@ -1614,15 +1677,16 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
visitor.visit_ty(ty);
|
||||
}
|
||||
}
|
||||
let parent_def_id = self.current_hir_id_owner.last().unwrap().0;
|
||||
let ty = l.ty.as_ref().map(|t| {
|
||||
let mut capturable_lifetimes;
|
||||
self.lower_ty(
|
||||
t,
|
||||
if self.sess.features_untracked().impl_trait_in_bindings {
|
||||
ImplTraitContext::OpaqueTy(
|
||||
Some(parent_def_id.to_def_id()),
|
||||
hir::OpaqueTyOrigin::Misc,
|
||||
)
|
||||
capturable_lifetimes = FxHashSet::default();
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut capturable_lifetimes,
|
||||
origin: hir::OpaqueTyOrigin::Binding,
|
||||
}
|
||||
} else {
|
||||
ImplTraitContext::Disallowed(ImplTraitPosition::Binding)
|
||||
},
|
||||
@ -1725,7 +1789,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
FnRetTy::Ty(ref ty) => {
|
||||
let context = match in_band_ty_params {
|
||||
Some((def_id, _)) if impl_trait_return_allow => {
|
||||
ImplTraitContext::OpaqueTy(Some(def_id), hir::OpaqueTyOrigin::FnReturn)
|
||||
ImplTraitContext::ReturnPositionOpaqueTy {
|
||||
fn_def_id: def_id,
|
||||
origin: hir::OpaqueTyOrigin::FnReturn,
|
||||
}
|
||||
}
|
||||
_ => ImplTraitContext::disallowed(),
|
||||
};
|
||||
@ -1944,7 +2011,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
// Foo = impl Trait` is, internally, created as a child of the
|
||||
// async fn, so the *type parameters* are inherited. It's
|
||||
// only the lifetime parameters that we must supply.
|
||||
let opaque_ty_ref = hir::TyKind::Def(hir::ItemId { id: opaque_ty_id }, generic_args);
|
||||
let opaque_ty_ref = hir::TyKind::OpaqueDef(hir::ItemId { id: opaque_ty_id }, generic_args);
|
||||
let opaque_ty = self.ty(opaque_ty_span, opaque_ty_ref);
|
||||
hir::FnRetTy::Return(self.arena.alloc(opaque_ty))
|
||||
}
|
||||
@ -1962,8 +2029,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
// Not `OpaqueTyOrigin::AsyncFn`: that's only used for the
|
||||
// `impl Future` opaque type that `async fn` implicitly
|
||||
// generates.
|
||||
let context =
|
||||
ImplTraitContext::OpaqueTy(Some(fn_def_id), hir::OpaqueTyOrigin::FnReturn);
|
||||
let context = ImplTraitContext::ReturnPositionOpaqueTy {
|
||||
fn_def_id,
|
||||
origin: hir::OpaqueTyOrigin::FnReturn,
|
||||
};
|
||||
self.lower_ty(ty, context)
|
||||
}
|
||||
FnRetTy::Default(ret_ty_span) => self.arena.alloc(self.ty_tup(*ret_ty_span, &[])),
|
||||
@ -2113,7 +2182,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
default: default.as_ref().map(|x| {
|
||||
self.lower_ty(
|
||||
x,
|
||||
ImplTraitContext::OpaqueTy(None, hir::OpaqueTyOrigin::Misc),
|
||||
ImplTraitContext::OtherOpaqueTy {
|
||||
capturable_lifetimes: &mut FxHashSet::default(),
|
||||
origin: hir::OpaqueTyOrigin::Misc,
|
||||
},
|
||||
)
|
||||
}),
|
||||
synthetic: param
|
||||
@ -2169,8 +2241,28 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
&NodeMap::default(),
|
||||
itctx.reborrow(),
|
||||
);
|
||||
|
||||
let trait_ref = self.with_in_scope_lifetime_defs(&p.bound_generic_params, |this| {
|
||||
this.lower_trait_ref(&p.trait_ref, itctx)
|
||||
// Any impl Trait types defined within this scope can capture
|
||||
// lifetimes bound on this predicate.
|
||||
let lt_def_names = p.bound_generic_params.iter().filter_map(|param| match param.kind {
|
||||
GenericParamKind::Lifetime { .. } => Some(hir::LifetimeName::Param(
|
||||
ParamName::Plain(param.ident.normalize_to_macros_2_0()),
|
||||
)),
|
||||
_ => None,
|
||||
});
|
||||
if let ImplTraitContext::OtherOpaqueTy { ref mut capturable_lifetimes, .. } = itctx {
|
||||
capturable_lifetimes.extend(lt_def_names.clone());
|
||||
}
|
||||
|
||||
let res = this.lower_trait_ref(&p.trait_ref, itctx.reborrow());
|
||||
|
||||
if let ImplTraitContext::OtherOpaqueTy { ref mut capturable_lifetimes, .. } = itctx {
|
||||
for param in lt_def_names {
|
||||
capturable_lifetimes.remove(¶m);
|
||||
}
|
||||
}
|
||||
res
|
||||
});
|
||||
|
||||
hir::PolyTraitRef { bound_generic_params, trait_ref, span: p.span }
|
||||
|
@ -121,6 +121,14 @@ impl<'a> PostExpansionVisitor<'a> {
|
||||
"amdgpu-kernel ABI is experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"avr-interrupt" | "avr-non-blocking-interrupt" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
abi_avr_interrupt,
|
||||
span,
|
||||
"avr-interrupt and avr-non-blocking-interrupt ABIs are experimental and subject to change"
|
||||
);
|
||||
}
|
||||
"efiapi" => {
|
||||
gate_feature_post!(
|
||||
&self,
|
||||
|
@ -1818,7 +1818,7 @@ impl<'a> State<'a> {
|
||||
ast::ExprKind::Call(ref func, ref args) => {
|
||||
self.print_expr_call(func, &args[..]);
|
||||
}
|
||||
ast::ExprKind::MethodCall(ref segment, ref args) => {
|
||||
ast::ExprKind::MethodCall(ref segment, ref args, _) => {
|
||||
self.print_expr_method_call(segment, &args[..]);
|
||||
}
|
||||
ast::ExprKind::Binary(op, ref lhs, ref rhs) => {
|
||||
@ -2593,7 +2593,7 @@ impl<'a> State<'a> {
|
||||
}
|
||||
|
||||
crate fn print_where_clause(&mut self, where_clause: &ast::WhereClause) {
|
||||
if where_clause.predicates.is_empty() {
|
||||
if where_clause.predicates.is_empty() && !where_clause.has_where_token {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -2739,7 +2739,11 @@ impl<'a> State<'a> {
|
||||
}
|
||||
let generics = ast::Generics {
|
||||
params: Vec::new(),
|
||||
where_clause: ast::WhereClause { predicates: Vec::new(), span: rustc_span::DUMMY_SP },
|
||||
where_clause: ast::WhereClause {
|
||||
has_where_token: false,
|
||||
predicates: Vec::new(),
|
||||
span: rustc_span::DUMMY_SP,
|
||||
},
|
||||
span: rustc_span::DUMMY_SP,
|
||||
};
|
||||
let header = ast::FnHeader { unsafety, ext, ..ast::FnHeader::default() };
|
||||
|
@ -391,7 +391,8 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||
used[*pos] = true;
|
||||
}
|
||||
|
||||
let named_pos: FxHashSet<usize> = args.named_args.values().cloned().collect();
|
||||
let named_pos: FxHashMap<usize, Symbol> =
|
||||
args.named_args.iter().map(|(&sym, &idx)| (idx, sym)).collect();
|
||||
let mut arg_spans = parser.arg_places.iter().map(|span| template_span.from_inner(*span));
|
||||
let mut template = vec![];
|
||||
for piece in unverified_pieces {
|
||||
@ -405,7 +406,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||
let operand_idx = match arg.position {
|
||||
parse::ArgumentIs(idx) | parse::ArgumentImplicitlyIs(idx) => {
|
||||
if idx >= args.operands.len()
|
||||
|| named_pos.contains(&idx)
|
||||
|| named_pos.contains_key(&idx)
|
||||
|| args.reg_args.contains(&idx)
|
||||
{
|
||||
let msg = format!("invalid reference to argument at index {}", idx);
|
||||
@ -426,7 +427,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||
};
|
||||
err.note(&msg);
|
||||
|
||||
if named_pos.contains(&idx) {
|
||||
if named_pos.contains_key(&idx) {
|
||||
err.span_label(args.operands[idx].1, "named argument");
|
||||
err.span_note(
|
||||
args.operands[idx].1,
|
||||
@ -457,7 +458,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||
|
||||
let mut chars = arg.format.ty.chars();
|
||||
let mut modifier = chars.next();
|
||||
if !chars.next().is_none() {
|
||||
if chars.next().is_some() {
|
||||
let span = arg
|
||||
.format
|
||||
.ty_span
|
||||
@ -480,27 +481,31 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||
}
|
||||
}
|
||||
|
||||
let operands = args.operands;
|
||||
let unused_operands: Vec<_> = used
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter(|&(_, used)| !used)
|
||||
.map(|(idx, _)| {
|
||||
if named_pos.contains(&idx) {
|
||||
// named argument
|
||||
(operands[idx].1, "named argument never used")
|
||||
let mut unused_operands = vec![];
|
||||
let mut help_str = String::new();
|
||||
for (idx, used) in used.into_iter().enumerate() {
|
||||
if !used {
|
||||
let msg = if let Some(sym) = named_pos.get(&idx) {
|
||||
help_str.push_str(&format!(" {{{}}}", sym));
|
||||
"named argument never used"
|
||||
} else {
|
||||
// positional argument
|
||||
(operands[idx].1, "argument never used")
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
help_str.push_str(&format!(" {{{}}}", idx));
|
||||
"argument never used"
|
||||
};
|
||||
unused_operands.push((args.operands[idx].1, msg));
|
||||
}
|
||||
}
|
||||
match unused_operands.len() {
|
||||
0 => {}
|
||||
1 => {
|
||||
let (sp, msg) = unused_operands.into_iter().next().unwrap();
|
||||
let mut err = ecx.struct_span_err(sp, msg);
|
||||
err.span_label(sp, msg);
|
||||
err.help(&format!(
|
||||
"if this argument is intentionally unused, \
|
||||
consider using it in an asm comment: `\"/*{} */\"`",
|
||||
help_str
|
||||
));
|
||||
err.emit();
|
||||
}
|
||||
_ => {
|
||||
@ -511,6 +516,11 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||
for (sp, msg) in unused_operands {
|
||||
err.span_label(sp, msg);
|
||||
}
|
||||
err.help(&format!(
|
||||
"if these arguments are intentionally unused, \
|
||||
consider using them in an asm comment: `\"/*{} */\"`",
|
||||
help_str
|
||||
));
|
||||
err.emit();
|
||||
}
|
||||
}
|
||||
@ -521,7 +531,8 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast
|
||||
parser.line_spans.iter().map(|span| template_span.from_inner(*span)).collect()
|
||||
};
|
||||
|
||||
let inline_asm = ast::InlineAsm { template, operands, options: args.options, line_spans };
|
||||
let inline_asm =
|
||||
ast::InlineAsm { template, operands: args.operands, options: args.options, line_spans };
|
||||
P(ast::Expr {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
kind: ast::ExprKind::InlineAsm(P(inline_asm)),
|
||||
|
@ -216,7 +216,11 @@ fn mk_ty_param(
|
||||
}
|
||||
|
||||
fn mk_generics(params: Vec<ast::GenericParam>, span: Span) -> Generics {
|
||||
Generics { params, where_clause: ast::WhereClause { predicates: Vec::new(), span }, span }
|
||||
Generics {
|
||||
params,
|
||||
where_clause: ast::WhereClause { has_where_token: false, predicates: Vec::new(), span },
|
||||
span,
|
||||
}
|
||||
}
|
||||
|
||||
/// Lifetimes and bounds on type parameters
|
||||
|
@ -122,6 +122,7 @@ pub fn expand_include<'cx>(
|
||||
|
||||
struct ExpandResult<'a> {
|
||||
p: Parser<'a>,
|
||||
node_id: ast::NodeId,
|
||||
}
|
||||
impl<'a> base::MacResult for ExpandResult<'a> {
|
||||
fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
|
||||
@ -130,7 +131,7 @@ pub fn expand_include<'cx>(
|
||||
self.p.sess.buffer_lint(
|
||||
&INCOMPLETE_INCLUDE,
|
||||
self.p.token.span,
|
||||
ast::CRATE_NODE_ID,
|
||||
self.node_id,
|
||||
"include macro expected single expression in source",
|
||||
);
|
||||
}
|
||||
@ -158,7 +159,7 @@ pub fn expand_include<'cx>(
|
||||
}
|
||||
}
|
||||
|
||||
Box::new(ExpandResult { p })
|
||||
Box::new(ExpandResult { p, node_id: cx.resolver.lint_node_id(cx.current_expansion.id) })
|
||||
}
|
||||
|
||||
// include_str! : read the given file, insert it as a literal string expr
|
||||
|
@ -375,6 +375,8 @@ impl<'tcx> FnAbiLlvmExt<'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
match self.conv {
|
||||
Conv::C | Conv::Rust => llvm::CCallConv,
|
||||
Conv::AmdGpuKernel => llvm::AmdGpuKernel,
|
||||
Conv::AvrInterrupt => llvm::AvrInterrupt,
|
||||
Conv::AvrNonBlockingInterrupt => llvm::AvrNonBlockingInterrupt,
|
||||
Conv::ArmAapcs => llvm::ArmAapcsCallConv,
|
||||
Conv::Msp430Intr => llvm::Msp430Intr,
|
||||
Conv::PtxKernel => llvm::PtxKernel,
|
||||
|
@ -16,7 +16,7 @@ use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, Mo
|
||||
use rustc_codegen_ssa::traits::*;
|
||||
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
|
||||
use rustc_data_structures::small_c_str::SmallCStr;
|
||||
use rustc_errors::{FatalError, Handler};
|
||||
use rustc_errors::{FatalError, Handler, Level};
|
||||
use rustc_fs_util::{link_or_copy, path_to_c_string};
|
||||
use rustc_hir::def_id::LOCAL_CRATE;
|
||||
use rustc_middle::bug;
|
||||
@ -242,6 +242,7 @@ impl<'a> Drop for DiagnosticHandlers<'a> {
|
||||
fn report_inline_asm(
|
||||
cgcx: &CodegenContext<LlvmCodegenBackend>,
|
||||
msg: String,
|
||||
level: llvm::DiagnosticLevel,
|
||||
mut cookie: c_uint,
|
||||
source: Option<(String, Vec<InnerSpan>)>,
|
||||
) {
|
||||
@ -251,7 +252,12 @@ fn report_inline_asm(
|
||||
if matches!(cgcx.lto, Lto::Fat | Lto::Thin) {
|
||||
cookie = 0;
|
||||
}
|
||||
cgcx.diag_emitter.inline_asm_error(cookie as u32, msg, source);
|
||||
let level = match level {
|
||||
llvm::DiagnosticLevel::Error => Level::Error,
|
||||
llvm::DiagnosticLevel::Warning => Level::Warning,
|
||||
llvm::DiagnosticLevel::Note | llvm::DiagnosticLevel::Remark => Level::Note,
|
||||
};
|
||||
cgcx.diag_emitter.inline_asm_error(cookie as u32, msg, level, source);
|
||||
}
|
||||
|
||||
unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic, user: *const c_void, cookie: c_uint) {
|
||||
@ -264,6 +270,7 @@ unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic, user: *const c_void
|
||||
// diagnostics.
|
||||
let mut have_source = false;
|
||||
let mut buffer = String::new();
|
||||
let mut level = llvm::DiagnosticLevel::Error;
|
||||
let mut loc = 0;
|
||||
let mut ranges = [0; 8];
|
||||
let mut num_ranges = ranges.len() / 2;
|
||||
@ -273,6 +280,7 @@ unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic, user: *const c_void
|
||||
diag,
|
||||
msg,
|
||||
buffer,
|
||||
&mut level,
|
||||
&mut loc,
|
||||
ranges.as_mut_ptr(),
|
||||
&mut num_ranges,
|
||||
@ -290,7 +298,7 @@ unsafe extern "C" fn inline_asm_handler(diag: &SMDiagnostic, user: *const c_void
|
||||
(buffer, spans)
|
||||
});
|
||||
|
||||
report_inline_asm(cgcx, msg, cookie, source);
|
||||
report_inline_asm(cgcx, msg, level, cookie, source);
|
||||
}
|
||||
|
||||
unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void) {
|
||||
@ -301,7 +309,13 @@ unsafe extern "C" fn diagnostic_handler(info: &DiagnosticInfo, user: *mut c_void
|
||||
|
||||
match llvm::diagnostic::Diagnostic::unpack(info) {
|
||||
llvm::diagnostic::InlineAsm(inline) => {
|
||||
report_inline_asm(cgcx, llvm::twine_to_string(inline.message), inline.cookie, None);
|
||||
report_inline_asm(
|
||||
cgcx,
|
||||
llvm::twine_to_string(inline.message),
|
||||
inline.level,
|
||||
inline.cookie,
|
||||
None,
|
||||
);
|
||||
}
|
||||
|
||||
llvm::diagnostic::Optimization(opt) => {
|
||||
|
@ -88,6 +88,7 @@ impl OptimizationDiagnostic<'ll> {
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct InlineAsmDiagnostic<'ll> {
|
||||
pub level: super::DiagnosticLevel,
|
||||
pub cookie: c_uint,
|
||||
pub message: &'ll Twine,
|
||||
pub instruction: Option<&'ll Value>,
|
||||
@ -98,10 +99,17 @@ impl InlineAsmDiagnostic<'ll> {
|
||||
let mut cookie = 0;
|
||||
let mut message = None;
|
||||
let mut instruction = None;
|
||||
let mut level = super::DiagnosticLevel::Error;
|
||||
|
||||
super::LLVMRustUnpackInlineAsmDiagnostic(di, &mut cookie, &mut message, &mut instruction);
|
||||
super::LLVMRustUnpackInlineAsmDiagnostic(
|
||||
di,
|
||||
&mut level,
|
||||
&mut cookie,
|
||||
&mut message,
|
||||
&mut instruction,
|
||||
);
|
||||
|
||||
InlineAsmDiagnostic { cookie, message: message.unwrap(), instruction }
|
||||
InlineAsmDiagnostic { level, cookie, message: message.unwrap(), instruction }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -45,6 +45,8 @@ pub enum CallConv {
|
||||
X86_64_Win64 = 79,
|
||||
X86_VectorCall = 80,
|
||||
X86_Intr = 83,
|
||||
AvrNonBlockingInterrupt = 84,
|
||||
AvrInterrupt = 85,
|
||||
AmdGpuKernel = 91,
|
||||
}
|
||||
|
||||
@ -489,6 +491,17 @@ pub enum DiagnosticKind {
|
||||
Linker,
|
||||
}
|
||||
|
||||
/// LLVMRustDiagnosticLevel
|
||||
#[derive(Copy, Clone)]
|
||||
#[repr(C)]
|
||||
#[allow(dead_code)] // Variants constructed by C++.
|
||||
pub enum DiagnosticLevel {
|
||||
Error,
|
||||
Warning,
|
||||
Note,
|
||||
Remark,
|
||||
}
|
||||
|
||||
/// LLVMRustArchiveKind
|
||||
#[derive(Copy, Clone)]
|
||||
#[repr(C)]
|
||||
@ -2054,6 +2067,7 @@ extern "C" {
|
||||
|
||||
pub fn LLVMRustUnpackInlineAsmDiagnostic(
|
||||
DI: &'a DiagnosticInfo,
|
||||
level_out: &mut DiagnosticLevel,
|
||||
cookie_out: &mut c_uint,
|
||||
message_out: &mut Option<&'a Twine>,
|
||||
instruction_out: &mut Option<&'a Value>,
|
||||
@ -2074,6 +2088,7 @@ extern "C" {
|
||||
d: &SMDiagnostic,
|
||||
message_out: &RustString,
|
||||
buffer_out: &RustString,
|
||||
level_out: &mut DiagnosticLevel,
|
||||
loc_out: &mut c_uint,
|
||||
ranges_out: *mut c_uint,
|
||||
num_ranges: &mut usize,
|
||||
|
@ -1551,7 +1551,7 @@ fn spawn_work<B: ExtraBackendMethods>(cgcx: CodegenContext<B>, work: WorkItem<B>
|
||||
|
||||
enum SharedEmitterMessage {
|
||||
Diagnostic(Diagnostic),
|
||||
InlineAsmError(u32, String, Option<(String, Vec<InnerSpan>)>),
|
||||
InlineAsmError(u32, String, Level, Option<(String, Vec<InnerSpan>)>),
|
||||
AbortIfErrors,
|
||||
Fatal(String),
|
||||
}
|
||||
@ -1576,9 +1576,10 @@ impl SharedEmitter {
|
||||
&self,
|
||||
cookie: u32,
|
||||
msg: String,
|
||||
level: Level,
|
||||
source: Option<(String, Vec<InnerSpan>)>,
|
||||
) {
|
||||
drop(self.sender.send(SharedEmitterMessage::InlineAsmError(cookie, msg, source)));
|
||||
drop(self.sender.send(SharedEmitterMessage::InlineAsmError(cookie, msg, level, source)));
|
||||
}
|
||||
|
||||
pub fn fatal(&self, msg: &str) {
|
||||
@ -1631,16 +1632,21 @@ impl SharedEmitterMain {
|
||||
}
|
||||
handler.emit_diagnostic(&d);
|
||||
}
|
||||
Ok(SharedEmitterMessage::InlineAsmError(cookie, msg, source)) => {
|
||||
Ok(SharedEmitterMessage::InlineAsmError(cookie, msg, level, source)) => {
|
||||
let msg = msg.strip_prefix("error: ").unwrap_or(&msg);
|
||||
|
||||
let mut err = match level {
|
||||
Level::Error => sess.struct_err(&msg),
|
||||
Level::Warning => sess.struct_warn(&msg),
|
||||
Level::Note => sess.struct_note_without_error(&msg),
|
||||
_ => bug!("Invalid inline asm diagnostic level"),
|
||||
};
|
||||
|
||||
// If the cookie is 0 then we don't have span information.
|
||||
let mut err = if cookie == 0 {
|
||||
sess.struct_err(&msg)
|
||||
} else {
|
||||
if cookie != 0 {
|
||||
let pos = BytePos::from_u32(cookie);
|
||||
let span = Span::with_root_ctxt(pos, pos);
|
||||
sess.struct_span_err(span, &msg)
|
||||
err.set_span(span);
|
||||
};
|
||||
|
||||
// Point to the generated assembly if it is available.
|
||||
|
@ -530,6 +530,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
args: &Vec<mir::Operand<'tcx>>,
|
||||
destination: &Option<(mir::Place<'tcx>, mir::BasicBlock)>,
|
||||
cleanup: Option<mir::BasicBlock>,
|
||||
fn_span: Span,
|
||||
) {
|
||||
let span = terminator.source_info.span;
|
||||
// Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
|
||||
@ -634,7 +635,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
|
||||
if intrinsic == Some("caller_location") {
|
||||
if let Some((_, target)) = destination.as_ref() {
|
||||
let location = self.get_caller_location(&mut bx, span);
|
||||
let location = self.get_caller_location(&mut bx, fn_span);
|
||||
|
||||
if let ReturnDest::IndirectOperand(tmp, _) = ret_dest {
|
||||
location.val.store(&mut bx, tmp);
|
||||
@ -798,7 +799,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
args.len() + 1,
|
||||
"#[track_caller] fn's must have 1 more argument in their ABI than in their MIR",
|
||||
);
|
||||
let location = self.get_caller_location(&mut bx, span);
|
||||
let location = self.get_caller_location(&mut bx, fn_span);
|
||||
debug!(
|
||||
"codegen_call_terminator({:?}): location={:?} (fn_span {:?})",
|
||||
terminator, location, fn_span
|
||||
);
|
||||
|
||||
let last_arg = fn_abi.args.last().unwrap();
|
||||
self.codegen_argument(&mut bx, location, &mut llargs, last_arg);
|
||||
}
|
||||
@ -921,12 +927,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
span_bug!(span, "invalid type for asm sym (fn)");
|
||||
}
|
||||
}
|
||||
mir::InlineAsmOperand::SymStatic { ref value } => {
|
||||
if let Some(def_id) = value.check_static_ptr(bx.tcx()) {
|
||||
InlineAsmOperandRef::SymStatic { def_id }
|
||||
} else {
|
||||
span_bug!(span, "invalid type for asm sym (static)");
|
||||
}
|
||||
mir::InlineAsmOperand::SymStatic { def_id } => {
|
||||
InlineAsmOperandRef::SymStatic { def_id }
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
@ -1016,6 +1018,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
ref destination,
|
||||
cleanup,
|
||||
from_hir_call: _,
|
||||
fn_span,
|
||||
} => {
|
||||
self.codegen_call_terminator(
|
||||
helper,
|
||||
@ -1025,6 +1028,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
args,
|
||||
destination,
|
||||
cleanup,
|
||||
fn_span,
|
||||
);
|
||||
}
|
||||
mir::TerminatorKind::GeneratorDrop | mir::TerminatorKind::Yield { .. } => {
|
||||
|
@ -63,7 +63,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
.tcx()
|
||||
.destructure_const(ty::ParamEnv::reveal_all().and(&c))
|
||||
.fields
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|field| {
|
||||
if let Some(prim) = field.val.try_to_scalar() {
|
||||
let layout = bx.layout_of(field_ty);
|
||||
|
@ -12,8 +12,8 @@ fn test_encode() {
|
||||
test(35, base);
|
||||
test(36, base);
|
||||
test(37, base);
|
||||
test(u64::max_value() as u128, base);
|
||||
test(u128::max_value(), base);
|
||||
test(u64::MAX as u128, base);
|
||||
test(u128::MAX, base);
|
||||
|
||||
for i in 0..1_000 {
|
||||
test(i * 983, base);
|
||||
|
@ -409,6 +409,7 @@ E0718: include_str!("./error_codes/E0718.md"),
|
||||
E0719: include_str!("./error_codes/E0719.md"),
|
||||
E0720: include_str!("./error_codes/E0720.md"),
|
||||
E0723: include_str!("./error_codes/E0723.md"),
|
||||
E0724: include_str!("./error_codes/E0724.md"),
|
||||
E0725: include_str!("./error_codes/E0725.md"),
|
||||
E0727: include_str!("./error_codes/E0727.md"),
|
||||
E0728: include_str!("./error_codes/E0728.md"),
|
||||
@ -440,6 +441,7 @@ E0754: include_str!("./error_codes/E0754.md"),
|
||||
E0758: include_str!("./error_codes/E0758.md"),
|
||||
E0760: include_str!("./error_codes/E0760.md"),
|
||||
E0761: include_str!("./error_codes/E0761.md"),
|
||||
E0762: include_str!("./error_codes/E0762.md"),
|
||||
;
|
||||
// E0006, // merged with E0005
|
||||
// E0008, // cannot bind by-move into a pattern guard
|
||||
@ -616,7 +618,6 @@ E0761: include_str!("./error_codes/E0761.md"),
|
||||
E0717, // rustc_promotable without stability attribute
|
||||
// E0721, // `await` keyword
|
||||
E0722, // Malformed `#[optimize]` attribute
|
||||
E0724, // `#[ffi_returns_twice]` is only allowed in foreign functions
|
||||
E0726, // non-explicit (not `'_`) elided lifetime in unsupported position
|
||||
// E0738, // Removed; errored on `#[track_caller] fn`s in `extern "Rust" { ... }`.
|
||||
E0755, // `#[ffi_pure]` is only allowed on foreign functions
|
||||
|
@ -4,10 +4,10 @@ Erroneous code example:
|
||||
|
||||
```compile_fail,E0446
|
||||
#![deny(private_in_public)]
|
||||
struct Bar(u32);
|
||||
|
||||
mod Foo {
|
||||
struct Bar(u32);
|
||||
|
||||
mod foo {
|
||||
use crate::Bar;
|
||||
pub fn bar() -> Bar { // error: private type in public interface
|
||||
Bar(0)
|
||||
}
|
||||
@ -16,15 +16,31 @@ mod Foo {
|
||||
fn main() {}
|
||||
```
|
||||
|
||||
To solve this error, please ensure that the type is also public. The type
|
||||
can be made inaccessible if necessary by placing it into a private inner
|
||||
module, but it still has to be marked with `pub`.
|
||||
There are two ways to solve this error. The first is to make the public type
|
||||
signature only public to a module that also has access to the private type.
|
||||
This is done by using pub(crate) or pub(in crate::my_mod::etc)
|
||||
Example:
|
||||
|
||||
```
|
||||
mod Foo {
|
||||
pub struct Bar(u32); // we set the Bar type public
|
||||
struct Bar(u32);
|
||||
|
||||
mod foo {
|
||||
use crate::Bar;
|
||||
pub(crate) fn bar() -> Bar { // only public to crate root
|
||||
Bar(0)
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {}
|
||||
```
|
||||
|
||||
The other way to solve this error is to make the private type public.
|
||||
Example:
|
||||
|
||||
```
|
||||
pub struct Bar(u32); // we set the Bar type public
|
||||
mod foo {
|
||||
use crate::Bar;
|
||||
pub fn bar() -> Bar { // ok!
|
||||
Bar(0)
|
||||
}
|
||||
|
@ -1,5 +1,4 @@
|
||||
A type with a `Drop` implementation was destructured when trying to initialize
|
||||
a static item.
|
||||
A value with a custom `Drop` implementation may be dropped during const-eval.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
@ -16,13 +15,14 @@ struct Foo {
|
||||
field1: DropType,
|
||||
}
|
||||
|
||||
static FOO: Foo = Foo { ..Foo { field1: DropType::A } }; // error!
|
||||
static FOO: Foo = Foo { field1: (DropType::A, DropType::A).1 }; // error!
|
||||
```
|
||||
|
||||
The problem here is that if the given type or one of its fields implements the
|
||||
`Drop` trait, this `Drop` implementation cannot be called during the static
|
||||
type initialization which might cause a memory leak. To prevent this issue,
|
||||
you need to instantiate all the static type's fields by hand.
|
||||
`Drop` trait, this `Drop` implementation cannot be called within a const
|
||||
context since it may run arbitrary, non-const-checked code. To prevent this
|
||||
issue, ensure all values with custom a custom `Drop` implementation escape the
|
||||
initializer.
|
||||
|
||||
```
|
||||
enum DropType {
|
||||
|
@ -1,6 +1,6 @@
|
||||
Trait methods currently cannot take patterns as arguments.
|
||||
|
||||
Example of erroneous code:
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0642
|
||||
trait Foo {
|
||||
|
@ -1,6 +1,15 @@
|
||||
`export_name` attributes may not contain null characters (`\0`).
|
||||
An `export_name` attribute contains null characters (`\0`).
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0648
|
||||
#[export_name="\0foo"] // error: `export_name` may not contain null characters
|
||||
pub fn bar() {}
|
||||
```
|
||||
|
||||
To fix this error, remove the null characters:
|
||||
|
||||
```
|
||||
#[export_name="foo"] // ok!
|
||||
pub fn bar() {}
|
||||
```
|
||||
|
@ -1,21 +1,25 @@
|
||||
`impl Trait` types cannot appear nested in the
|
||||
generic arguments of other `impl Trait` types.
|
||||
`impl Trait` types cannot appear nested in the generic arguments of other
|
||||
`impl Trait` types.
|
||||
|
||||
Example of erroneous code:
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0666
|
||||
trait MyGenericTrait<T> {}
|
||||
trait MyInnerTrait {}
|
||||
|
||||
fn foo(bar: impl MyGenericTrait<impl MyInnerTrait>) {}
|
||||
fn foo(
|
||||
bar: impl MyGenericTrait<impl MyInnerTrait>, // error!
|
||||
) {}
|
||||
```
|
||||
|
||||
Type parameters for `impl Trait` types must be
|
||||
explicitly defined as named generic parameters:
|
||||
Type parameters for `impl Trait` types must be explicitly defined as named
|
||||
generic parameters:
|
||||
|
||||
```
|
||||
trait MyGenericTrait<T> {}
|
||||
trait MyInnerTrait {}
|
||||
|
||||
fn foo<T: MyInnerTrait>(bar: impl MyGenericTrait<T>) {}
|
||||
fn foo<T: MyInnerTrait>(
|
||||
bar: impl MyGenericTrait<T>, // ok!
|
||||
) {}
|
||||
```
|
||||
|
24
src/librustc_error_codes/error_codes/E0724.md
Normal file
24
src/librustc_error_codes/error_codes/E0724.md
Normal file
@ -0,0 +1,24 @@
|
||||
`#[ffi_returns_twice]` was used on non-foreign function.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0724
|
||||
#![feature(ffi_returns_twice)]
|
||||
#![crate_type = "lib"]
|
||||
|
||||
#[ffi_returns_twice] // error!
|
||||
pub fn foo() {}
|
||||
```
|
||||
|
||||
`#[ffi_returns_twice]` can only be used on foreign function declarations.
|
||||
For example, we might correct the previous example by declaring
|
||||
the function inside of an `extern` block.
|
||||
|
||||
```
|
||||
#![feature(ffi_returns_twice)]
|
||||
|
||||
extern {
|
||||
#[ffi_returns_twice] // ok!
|
||||
pub fn foo();
|
||||
}
|
||||
```
|
13
src/librustc_error_codes/error_codes/E0762.md
Normal file
13
src/librustc_error_codes/error_codes/E0762.md
Normal file
@ -0,0 +1,13 @@
|
||||
A character literal wasn't ended with a quote.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,E0762
|
||||
static C: char = '●; // error!
|
||||
```
|
||||
|
||||
To fix this error, add the missing quote:
|
||||
|
||||
```
|
||||
static C: char = '●'; // ok!
|
||||
```
|
@ -159,14 +159,10 @@ impl AnnotateSnippetEmitterWriter {
|
||||
// FIXME(#59346): Not really sure when `fold` should be true or false
|
||||
fold: false,
|
||||
annotations: annotations
|
||||
.into_iter()
|
||||
.iter()
|
||||
.map(|annotation| SourceAnnotation {
|
||||
range: (annotation.start_col, annotation.end_col),
|
||||
label: annotation
|
||||
.label
|
||||
.as_ref()
|
||||
.map(|s| s.as_str())
|
||||
.unwrap_or_default(),
|
||||
label: annotation.label.as_deref().unwrap_or_default(),
|
||||
annotation_type: annotation_type_for_level(*level),
|
||||
})
|
||||
.collect(),
|
||||
|
@ -296,6 +296,29 @@ impl Diagnostic {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn multipart_suggestions(
|
||||
&mut self,
|
||||
msg: &str,
|
||||
suggestions: Vec<Vec<(Span, String)>>,
|
||||
applicability: Applicability,
|
||||
) -> &mut Self {
|
||||
self.suggestions.push(CodeSuggestion {
|
||||
substitutions: suggestions
|
||||
.into_iter()
|
||||
.map(|suggestion| Substitution {
|
||||
parts: suggestion
|
||||
.into_iter()
|
||||
.map(|(span, snippet)| SubstitutionPart { snippet, span })
|
||||
.collect(),
|
||||
})
|
||||
.collect(),
|
||||
msg: msg.to_owned(),
|
||||
style: SuggestionStyle::ShowCode,
|
||||
applicability,
|
||||
});
|
||||
self
|
||||
}
|
||||
|
||||
/// Prints out a message with for a multipart suggestion without showing the suggested code.
|
||||
///
|
||||
/// This is intended to be used for suggestions that are obvious in what the changes need to
|
||||
|
@ -260,6 +260,19 @@ impl<'a> DiagnosticBuilder<'a> {
|
||||
self
|
||||
}
|
||||
|
||||
pub fn multipart_suggestions(
|
||||
&mut self,
|
||||
msg: &str,
|
||||
suggestions: Vec<Vec<(Span, String)>>,
|
||||
applicability: Applicability,
|
||||
) -> &mut Self {
|
||||
if !self.0.allow_suggestions {
|
||||
return self;
|
||||
}
|
||||
self.0.diagnostic.multipart_suggestions(msg, suggestions, applicability);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn tool_only_multipart_suggestion(
|
||||
&mut self,
|
||||
msg: &str,
|
||||
|
@ -581,6 +581,11 @@ impl Handler {
|
||||
DiagnosticBuilder::new(self, Level::Help, msg)
|
||||
}
|
||||
|
||||
/// Construct a builder at the `Note` level with the `msg`.
|
||||
pub fn struct_note_without_error(&self, msg: &str) -> DiagnosticBuilder<'_> {
|
||||
DiagnosticBuilder::new(self, Level::Note, msg)
|
||||
}
|
||||
|
||||
pub fn span_fatal(&self, span: impl Into<MultiSpan>, msg: &str) -> FatalError {
|
||||
self.emit_diag_at_span(Diagnostic::new(Fatal, msg), span);
|
||||
FatalError
|
||||
|
@ -915,6 +915,9 @@ pub trait Resolver {
|
||||
|
||||
fn check_unused_macros(&mut self);
|
||||
|
||||
/// Some parent node that is close enough to the given macro call.
|
||||
fn lint_node_id(&mut self, expn_id: ExpnId) -> NodeId;
|
||||
|
||||
fn has_derive_copy(&self, expn_id: ExpnId) -> bool;
|
||||
fn add_derive_copy(&mut self, expn_id: ExpnId);
|
||||
fn cfg_accessible(&mut self, expn_id: ExpnId, path: &ast::Path) -> Result<bool, Indeterminate>;
|
||||
|
@ -272,7 +272,7 @@ impl<'a> ExtCtxt<'a> {
|
||||
) -> P<ast::Expr> {
|
||||
args.insert(0, expr);
|
||||
let segment = ast::PathSegment::from_ident(ident.with_span_pos(span));
|
||||
self.expr(span, ast::ExprKind::MethodCall(segment, args))
|
||||
self.expr(span, ast::ExprKind::MethodCall(segment, args, span))
|
||||
}
|
||||
pub fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> {
|
||||
self.expr(b.span, ast::ExprKind::Block(b, None))
|
||||
|
@ -106,7 +106,7 @@
|
||||
//! bound.
|
||||
use crate::mbe::{KleeneToken, TokenTree};
|
||||
|
||||
use rustc_ast::ast::NodeId;
|
||||
use rustc_ast::ast::{NodeId, DUMMY_NODE_ID};
|
||||
use rustc_ast::token::{DelimToken, Token, TokenKind};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_session::lint::builtin::META_VARIABLE_MISUSE;
|
||||
@ -626,5 +626,8 @@ fn ops_is_prefix(
|
||||
}
|
||||
|
||||
fn buffer_lint(sess: &ParseSess, span: MultiSpan, node_id: NodeId, message: &str) {
|
||||
sess.buffer_lint(&META_VARIABLE_MISUSE, span, node_id, message);
|
||||
// Macros loaded from other crates have dummy node ids.
|
||||
if node_id != DUMMY_NODE_ID {
|
||||
sess.buffer_lint(&META_VARIABLE_MISUSE, span, node_id, message);
|
||||
}
|
||||
}
|
||||
|
@ -383,7 +383,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
|
||||
}
|
||||
}
|
||||
TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
|
||||
if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
|
||||
if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
|
||||
return Err((span, "missing fragment specifier".to_string()));
|
||||
}
|
||||
}
|
||||
@ -566,7 +566,7 @@ fn inner_parse_loop<'root, 'tt>(
|
||||
|
||||
// We need to match a metavar (but the identifier is invalid)... this is an error
|
||||
TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
|
||||
if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
|
||||
if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
|
||||
return Error(span, "missing fragment specifier".to_string());
|
||||
}
|
||||
}
|
||||
|
@ -474,7 +474,9 @@ pub fn compile_declarative_macro(
|
||||
.map(|m| {
|
||||
if let MatchedNonterminal(ref nt) = *m {
|
||||
if let NtTT(ref tt) = **nt {
|
||||
let tt = mbe::quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
|
||||
let tt = mbe::quoted::parse(tt.clone().into(), true, sess, def.id)
|
||||
.pop()
|
||||
.unwrap();
|
||||
valid &= check_lhs_nt_follows(sess, features, &def.attrs, &tt);
|
||||
return tt;
|
||||
}
|
||||
@ -491,7 +493,9 @@ pub fn compile_declarative_macro(
|
||||
.map(|m| {
|
||||
if let MatchedNonterminal(ref nt) = *m {
|
||||
if let NtTT(ref tt) = **nt {
|
||||
return mbe::quoted::parse(tt.clone().into(), false, sess).pop().unwrap();
|
||||
return mbe::quoted::parse(tt.clone().into(), false, sess, def.id)
|
||||
.pop()
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
|
||||
@ -509,9 +513,7 @@ pub fn compile_declarative_macro(
|
||||
valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
|
||||
}
|
||||
|
||||
// We use CRATE_NODE_ID instead of `def.id` otherwise we may emit buffered lints for a node id
|
||||
// that is not lint-checked and trigger the "failed to process buffered lint here" bug.
|
||||
valid &= macro_check::check_meta_variables(sess, ast::CRATE_NODE_ID, def.span, &lhses, &rhses);
|
||||
valid &= macro_check::check_meta_variables(sess, def.id, def.span, &lhses, &rhses);
|
||||
|
||||
let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
|
||||
match transparency_error {
|
||||
|
@ -1,6 +1,7 @@
|
||||
use crate::mbe::macro_parser;
|
||||
use crate::mbe::{Delimited, KleeneOp, KleeneToken, SequenceRepetition, TokenTree};
|
||||
|
||||
use rustc_ast::ast::{NodeId, DUMMY_NODE_ID};
|
||||
use rustc_ast::token::{self, Token};
|
||||
use rustc_ast::tokenstream;
|
||||
use rustc_ast_pretty::pprust;
|
||||
@ -36,6 +37,7 @@ pub(super) fn parse(
|
||||
input: tokenstream::TokenStream,
|
||||
expect_matchers: bool,
|
||||
sess: &ParseSess,
|
||||
node_id: NodeId,
|
||||
) -> Vec<TokenTree> {
|
||||
// Will contain the final collection of `self::TokenTree`
|
||||
let mut result = Vec::new();
|
||||
@ -46,7 +48,7 @@ pub(super) fn parse(
|
||||
while let Some(tree) = trees.next() {
|
||||
// Given the parsed tree, if there is a metavar and we are expecting matchers, actually
|
||||
// parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
|
||||
let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
|
||||
let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id);
|
||||
match tree {
|
||||
TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
|
||||
let span = match trees.next() {
|
||||
@ -65,7 +67,10 @@ pub(super) fn parse(
|
||||
}
|
||||
tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
|
||||
};
|
||||
sess.missing_fragment_specifiers.borrow_mut().insert(span);
|
||||
if node_id != DUMMY_NODE_ID {
|
||||
// Macros loaded from other crates have dummy node ids.
|
||||
sess.missing_fragment_specifiers.borrow_mut().insert(span, node_id);
|
||||
}
|
||||
result.push(TokenTree::MetaVarDecl(span, ident, Ident::invalid()));
|
||||
}
|
||||
|
||||
@ -96,6 +101,7 @@ fn parse_tree(
|
||||
trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
|
||||
expect_matchers: bool,
|
||||
sess: &ParseSess,
|
||||
node_id: NodeId,
|
||||
) -> TokenTree {
|
||||
// Depending on what `tree` is, we could be parsing different parts of a macro
|
||||
match tree {
|
||||
@ -111,7 +117,7 @@ fn parse_tree(
|
||||
sess.span_diagnostic.span_err(span.entire(), &msg);
|
||||
}
|
||||
// Parse the contents of the sequence itself
|
||||
let sequence = parse(tts, expect_matchers, sess);
|
||||
let sequence = parse(tts, expect_matchers, sess, node_id);
|
||||
// Get the Kleene operator and optional separator
|
||||
let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
|
||||
// Count the number of captured "names" (i.e., named metavars)
|
||||
@ -158,7 +164,7 @@ fn parse_tree(
|
||||
// descend into the delimited set and further parse it.
|
||||
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
|
||||
span,
|
||||
Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess) }),
|
||||
Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess, node_id) }),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
@ -582,10 +582,10 @@ impl server::Literal for Rustc<'_> {
|
||||
};
|
||||
|
||||
// Bounds check the values, preventing addition overflow and OOB spans.
|
||||
if start > u32::max_value() as usize
|
||||
|| end > u32::max_value() as usize
|
||||
|| (u32::max_value() - start as u32) < span.lo().to_u32()
|
||||
|| (u32::max_value() - end as u32) < span.lo().to_u32()
|
||||
if start > u32::MAX as usize
|
||||
|| end > u32::MAX as usize
|
||||
|| (u32::MAX - start as u32) < span.lo().to_u32()
|
||||
|| (u32::MAX - end as u32) < span.lo().to_u32()
|
||||
|| start >= end
|
||||
|| end > length
|
||||
{
|
||||
|
@ -574,6 +574,12 @@ declare_features! (
|
||||
/// No longer treat an unsafe function as an unsafe block.
|
||||
(active, unsafe_block_in_unsafe_fn, "1.45.0", Some(71668), None),
|
||||
|
||||
/// Allows `extern "avr-interrupt" fn()` and `extern "avr-non-blocking-interrupt" fn()`.
|
||||
(active, abi_avr_interrupt, "1.45.0", Some(69664), None),
|
||||
|
||||
/// Be more precise when looking for live drops in a const context.
|
||||
(active, const_precise_live_drops, "1.46.0", Some(73255), None),
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// feature-group-end: actual feature gates
|
||||
// -------------------------------------------------------------------------
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user