Auto merge of #62355 - Centril:rollup-xnxtcgm, r=Centril

Rollup of 16 pull requests

Successful merges:

 - #62039 (Remove needless lifetimes (rustc))
 - #62173 (rename InterpretCx -> InterpCx)
 - #62240 (wfcheck: resolve the type-vars in `AdtField` types)
 - #62249 (Use mem::take instead of mem::replace with default)
 - #62252 (Update mem::replace example to not be identical to mem::take)
 - #62258 (syntax: Unsupport `foo! bar { ... }` macros in the parser)
 - #62268 (Clean up inherent_impls)
 - #62287 (Use link attributes on extern "C" blocks with llvm-libunwind)
 - #62295 (miri realloc: do not require giving old size+align)
 - #62297 (refactor check_for_substitution)
 - #62316 (When possible without changing semantics, implement Iterator::last in terms of DoubleEndedIterator::next_back for types in liballoc and libcore.)
 - #62317 (Migrate `compile-pass` annotations to `build-pass`)
 - #62337 (Fix bucket in CPU usage script)
 - #62344 (simplify Option::get_or_insert)
 - #62346 (enable a few more tests in Miri and update the comment for others)
 - #62351 (remove bogus example from drop_in_place)

Failed merges:

r? @ghost
bors 2019-07-03 23:39:36 +00:00
commit b43eb4235a
919 changed files with 1638 additions and 1664 deletions

View File

@ -16,7 +16,7 @@
set -ex
bucket=rust-lang-ci-evalazure
bucket=rust-lang-ci2
commit=$1
builder=$2

View File

@ -728,6 +728,14 @@ impl<I: Iterator + ?Sized> Iterator for Box<I> {
(**self).nth(n)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + Sized> Iterator for Box<I> {
fn last(self) -> Option<I::Item> where I: Sized {
(*self).last()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
fn next_back(&mut self) -> Option<I::Item> {
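This hunk (from #62316) forwards `Iterator::last` on `Box<I>` to the boxed iterator's own `last` rather than leaving the default implementation, which drains the iterator element by element via `next`. A small usage sketch, not part of the diff, with a boxed `std::vec::IntoIter`:

fn main() {
    // `last` takes `self` by value, so the box is consumed and the call
    // delegates to the inner iterator's own `last`.
    let it: Box<std::vec::IntoIter<i32>> = Box::new(vec![1, 2, 3].into_iter());
    assert_eq!(it.last(), Some(3));
}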

View File

@ -1035,6 +1035,11 @@ impl<'a, T> Iterator for Iter<'a, T> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
#[inline]
fn last(self) -> Option<&'a T> {
self.iter.last()
}
}
#[stable(feature = "rust1", since = "1.0.0")]

View File

@ -770,8 +770,8 @@ impl<K: Ord, V> BTreeMap<K, V> {
}
// First, we merge `self` and `other` into a sorted sequence in linear time.
let self_iter = mem::replace(self, BTreeMap::new()).into_iter();
let other_iter = mem::replace(other, BTreeMap::new()).into_iter();
let self_iter = mem::take(self).into_iter();
let other_iter = mem::take(other).into_iter();
let iter = MergeIter {
left: self_iter.peekable(),
right: other_iter.peekable(),
@ -1193,6 +1193,10 @@ impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.length, Some(self.length))
}
fn last(mut self) -> Option<(&'a K, &'a V)> {
self.next_back()
}
}
#[stable(feature = "fused", since = "1.26.0")]
@ -1253,6 +1257,10 @@ impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.length, Some(self.length))
}
fn last(mut self) -> Option<(&'a K, &'a mut V)> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -1421,6 +1429,10 @@ impl<'a, K, V> Iterator for Keys<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
fn last(mut self) -> Option<&'a K> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -1458,6 +1470,10 @@ impl<'a, K, V> Iterator for Values<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
fn last(mut self) -> Option<&'a V> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -1495,6 +1511,10 @@ impl<'a, K, V> Iterator for Range<'a, K, V> {
unsafe { Some(self.next_unchecked()) }
}
}
fn last(mut self) -> Option<(&'a K, &'a V)> {
self.next_back()
}
}
#[stable(feature = "map_values_mut", since = "1.10.0")]
@ -1508,6 +1528,10 @@ impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
fn last(mut self) -> Option<&'a mut V> {
self.next_back()
}
}
#[stable(feature = "map_values_mut", since = "1.10.0")]
@ -1626,6 +1650,10 @@ impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
unsafe { Some(self.next_unchecked()) }
}
}
fn last(mut self) -> Option<(&'a K, &'a mut V)> {
self.next_back()
}
}
impl<'a, K, V> RangeMut<'a, K, V> {
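The additions above (and the matching ones in the other collection iterators touched by this rollup) override `Iterator::last` with a call to `DoubleEndedIterator::next_back`: since `last` consumes the iterator, one step from the back gives the same answer as draining from the front, in O(1) instead of O(n). A self-contained sketch of the same pattern on an illustrative range-like iterator, not taken from the diff:

struct MyRange { lo: u32, hi: u32 } // yields lo, lo + 1, ..., hi - 1

impl Iterator for MyRange {
    type Item = u32;
    fn next(&mut self) -> Option<u32> {
        if self.lo < self.hi { self.lo += 1; Some(self.lo - 1) } else { None }
    }
    // The pattern from the hunks above: `last` takes `self` by value,
    // so it can jump straight to the back instead of walking from the front.
    fn last(mut self) -> Option<u32> {
        self.next_back()
    }
}

impl DoubleEndedIterator for MyRange {
    fn next_back(&mut self) -> Option<u32> {
        if self.lo < self.hi { self.hi -= 1; Some(self.hi) } else { None }
    }
}

fn main() {
    assert_eq!(MyRange { lo: 0, hi: 5 }.last(), Some(4));
    assert_eq!(MyRange { lo: 3, hi: 3 }.last(), None);
}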

View File

@ -1019,6 +1019,9 @@ impl<'a, T> Iterator for Iter<'a, T> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
fn last(mut self) -> Option<&'a T> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
@ -1073,6 +1076,10 @@ impl<'a, T> Iterator for Range<'a, T> {
fn next(&mut self) -> Option<&'a T> {
self.iter.next().map(|(k, _)| k)
}
fn last(mut self) -> Option<&'a T> {
self.next_back()
}
}
#[stable(feature = "btree_range", since = "1.17.0")]

View File

@ -708,7 +708,7 @@ impl<T> LinkedList<T> {
let len = self.len();
assert!(at <= len, "Cannot split off at a nonexistent index");
if at == 0 {
return mem::replace(self, Self::new());
return mem::take(self);
} else if at == len {
return Self::new();
}
@ -832,6 +832,11 @@ impl<'a, T> Iterator for Iter<'a, T> {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len, Some(self.len))
}
#[inline]
fn last(mut self) -> Option<&'a T> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -881,6 +886,11 @@ impl<'a, T> Iterator for IterMut<'a, T> {
fn size_hint(&self) -> (usize, Option<usize>) {
(self.len, Some(self.len))
}
#[inline]
fn last(mut self) -> Option<&'a mut T> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]

View File

@ -2206,6 +2206,11 @@ impl<'a, T> Iterator for Iter<'a, T> {
self.tail = self.head - iter.len();
final_res
}
#[inline]
fn last(mut self) -> Option<&'a T> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -2319,6 +2324,11 @@ impl<'a, T> Iterator for IterMut<'a, T> {
accum = front.iter_mut().fold(accum, &mut f);
back.iter_mut().fold(accum, &mut f)
}
#[inline]
fn last(mut self) -> Option<&'a mut T> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]

View File

@ -112,6 +112,7 @@
#![feature(maybe_uninit_extra, maybe_uninit_slice, maybe_uninit_array)]
#![feature(alloc_layout_extra)]
#![feature(try_trait)]
#![feature(mem_take)]
// Allow testing this library

View File

@ -203,7 +203,7 @@ impl ToOwned for str {
}
fn clone_into(&self, target: &mut String) {
let mut b = mem::replace(target, String::new()).into_bytes();
let mut b = mem::take(target).into_bytes();
self.as_bytes().clone_into(&mut b);
*target = unsafe { String::from_utf8_unchecked(b) }
}
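Context for this hunk: `clone_into` exists so callers can reuse the destination `String`'s allocation instead of building a fresh one with `to_owned`; the `mem::take` merely moves the byte buffer out so it can be refilled in place. A hedged usage sketch (`str::clone_into` was still unstable behind `toowned_clone_into` when this landed, though it has since been stabilized):

fn main() {
    let mut target = String::with_capacity(64);
    target.push_str("old contents");

    // Reuses `target`'s existing buffer rather than allocating a new one.
    "fresh".clone_into(&mut target);

    assert_eq!(target, "fresh");
    assert!(target.capacity() >= 64);
}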

View File

@ -2385,6 +2385,11 @@ impl Iterator for Drain<'_> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
#[inline]
fn last(mut self) -> Option<char> {
self.next_back()
}
}
#[stable(feature = "drain", since = "1.6.0")]

View File

@ -761,7 +761,6 @@ fn from_into_inner() {
it.next().unwrap();
let vec = it.collect::<Vec<_>>();
assert_eq!(vec, [2, 3]);
#[cfg(not(miri))] // Miri does not support comparing dangling pointers
assert!(ptr != vec.as_ptr());
}
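Per the hunk header, this test loses one line: the `#[cfg(not(miri))]` gate is removed (part of #62346), so the dangling-pointer comparison now also runs under Miri. For reference, a minimal sketch of the gating pattern itself, with an illustrative test name and body:

#[test]
#[cfg(not(miri))] // skipped under Miri: relies on something the interpreter cannot model
fn prints_allocation_address() {
    let v = vec![1u8, 2, 3];
    println!("buffer lives at {:p}", v.as_ptr());
}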

View File

@ -117,6 +117,7 @@ impl Iterator for EscapeDefault {
type Item = u8;
fn next(&mut self) -> Option<u8> { self.range.next().map(|i| self.data[i]) }
fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }
fn last(mut self) -> Option<u8> { self.next_back() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl DoubleEndedIterator for EscapeDefault {

View File

@ -126,6 +126,7 @@
#![feature(adx_target_feature)]
#![feature(maybe_uninit_slice, maybe_uninit_array)]
#![feature(external_doc)]
#![feature(mem_take)]
#[prelude_import]
#[allow(unused)]

View File

@ -552,6 +552,12 @@ pub fn swap<T>(x: &mut T, y: &mut T) {
/// mem::take(&mut self.buf)
/// }
/// }
///
/// let mut buffer = Buffer { buf: vec![0, 1] };
/// assert_eq!(buffer.buf.len(), 2);
///
/// assert_eq!(buffer.get_and_reset(), vec![0, 1]);
/// assert_eq!(buffer.buf.len(), 0);
/// ```
///
/// [`Clone`]: ../../std/clone/trait.Clone.html
@ -586,17 +592,17 @@ pub fn take<T: Default>(dest: &mut T) -> T {
/// struct Buffer<T> { buf: Vec<T> }
///
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// fn replace_index(&mut self, i: usize, v: T) -> T {
/// // error: cannot move out of dereference of `&mut`-pointer
/// let buf = self.buf;
/// self.buf = Vec::new();
/// buf
/// let t = self.buf[i];
/// self.buf[i] = v;
/// t
/// }
/// }
/// ```
///
/// Note that `T` does not necessarily implement [`Clone`], so it can't even clone and reset
/// `self.buf`. But `replace` can be used to disassociate the original value of `self.buf` from
/// Note that `T` does not necessarily implement [`Clone`], so we can't even clone `self.buf[i]` to
/// avoid the move. But `replace` can be used to disassociate the original value at that index from
/// `self`, allowing it to be returned:
///
/// ```
@ -605,10 +611,16 @@ pub fn take<T: Default>(dest: &mut T) -> T {
///
/// # struct Buffer<T> { buf: Vec<T> }
/// impl<T> Buffer<T> {
/// fn get_and_reset(&mut self) -> Vec<T> {
/// mem::replace(&mut self.buf, Vec::new())
/// fn replace_index(&mut self, i: usize, v: T) -> T {
/// mem::replace(&mut self.buf[i], v)
/// }
/// }
///
/// let mut buffer = Buffer { buf: vec![0, 1] };
/// assert_eq!(buffer.buf[0], 0);
///
/// assert_eq!(buffer.replace_index(0, 2), 0);
/// assert_eq!(buffer.buf[0], 2);
/// ```
///
/// [`Clone`]: ../../std/clone/trait.Clone.html
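The reworked docs above draw the line between the two helpers: `take` resets the slot to `Default::default()`, while `replace` swaps in an arbitrary caller-supplied value. A compact standalone sketch of both (note that `mem::take` was still gated behind `#![feature(mem_take)]` at the time of this commit, which is why that feature attribute appears in several crates below):

use std::mem;

fn main() {
    let mut s = String::from("hello");

    // `take` moves the value out and leaves `Default::default()` behind.
    let taken = mem::take(&mut s);
    assert_eq!(taken, "hello");
    assert_eq!(s, "");

    // `replace` moves the value out and leaves the supplied value behind,
    // which is why it still has uses `take` cannot cover.
    let replaced = mem::replace(&mut s, String::from("world"));
    assert_eq!(replaced, "");
    assert_eq!(s, "world");
}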

View File

@ -777,15 +777,7 @@ impl<T> Option<T> {
#[inline]
#[stable(feature = "option_entry", since = "1.20.0")]
pub fn get_or_insert(&mut self, v: T) -> &mut T {
match *self {
None => *self = Some(v),
_ => (),
}
match *self {
Some(ref mut v) => v,
None => unsafe { hint::unreachable_unchecked() },
}
self.get_or_insert_with(|| v)
}
/// Inserts a value computed from `f` into the option if it is [`None`], then
@ -845,7 +837,7 @@ impl<T> Option<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn take(&mut self) -> Option<T> {
mem::replace(self, None)
mem::take(self)
}
/// Replaces the actual value in the option by the value given in parameter,
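The simplified `get_or_insert` above now just delegates to `get_or_insert_with`, so both entry points share one code path and the `unreachable_unchecked` dance disappears. A usage sketch showing that observable behavior is unchanged:

fn main() {
    let mut slot: Option<u32> = None;

    // Inserts because the option is `None`, then hands back a mutable reference.
    *slot.get_or_insert(5) += 1;
    assert_eq!(slot, Some(6));

    // Already `Some`, so the supplied value is ignored (though still evaluated,
    // which is the usual argument for `get_or_insert_with` when construction is costly).
    assert_eq!(*slot.get_or_insert(99), 6);

    // The closure form that `get_or_insert` now delegates to.
    let mut other: Option<u32> = None;
    assert_eq!(*other.get_or_insert_with(|| 5), 5);
}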

View File

@ -100,7 +100,11 @@ pub use unique::Unique;
/// as the compiler doesn't need to prove that it's sound to elide the
/// copy.
///
/// Unaligned values cannot be dropped in place, they must be copied to an aligned
/// location first using [`ptr::read_unaligned`].
///
/// [`ptr::read`]: ../ptr/fn.read.html
/// [`ptr::read_unaligned`]: ../ptr/fn.read_unaligned.html
///
/// # Safety
///
@ -108,8 +112,7 @@ pub use unique::Unique;
///
/// * `to_drop` must be [valid] for reads.
///
/// * `to_drop` must be properly aligned. See the example below for how to drop
/// an unaligned pointer.
/// * `to_drop` must be properly aligned.
///
/// Additionally, if `T` is not [`Copy`], using the pointed-to value after
/// calling `drop_in_place` can cause undefined behavior. Note that `*to_drop =
@ -153,31 +156,6 @@ pub use unique::Unique;
/// assert!(weak.upgrade().is_none());
/// ```
///
/// Unaligned values cannot be dropped in place, they must be copied to an aligned
/// location first:
/// ```
/// use std::ptr;
/// use std::mem::{self, MaybeUninit};
///
/// unsafe fn drop_after_copy<T>(to_drop: *mut T) {
/// let mut copy: MaybeUninit<T> = MaybeUninit::uninit();
/// ptr::copy(to_drop, copy.as_mut_ptr(), 1);
/// drop(copy.assume_init());
/// }
///
/// #[repr(packed, C)]
/// struct Packed {
/// _padding: u8,
/// unaligned: Vec<i32>,
/// }
///
/// let mut p = Packed { _padding: 0, unaligned: vec![42] };
/// unsafe {
/// drop_after_copy(&mut p.unaligned as *mut _);
/// mem::forget(p);
/// }
/// ```
///
/// Notice that the compiler performs this copy automatically when dropping packed structs,
/// i.e., you do not usually have to worry about such issues unless you call `drop_in_place`
/// manually.
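The paragraph above now points to `ptr::read_unaligned` instead of carrying the full packed-struct example that was removed. A minimal sketch of that approach; the struct and field names are illustrative, and `ptr::addr_of_mut!` (stabilized after this commit) stands in for the `ptr::copy` + `MaybeUninit` dance from the old example:

use std::ptr;

#[repr(packed)]
struct Packed {
    _padding: u8,
    unaligned: Vec<i32>, // starts at offset 1, so it is misaligned
}

fn main() {
    let mut p = Packed { _padding: 0, unaligned: vec![42] };
    unsafe {
        // Copy the value out to an aligned location, then drop the copy.
        // Calling `drop_in_place` directly on the misaligned field would be UB.
        let v: Vec<i32> = ptr::read_unaligned(ptr::addr_of_mut!(p.unaligned));
        drop(v);
        // The bytes inside `p` still look like a live `Vec`, so forget `p`
        // to avoid a double free when it is dropped.
        std::mem::forget(p);
    }
}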

View File

@ -1333,6 +1333,11 @@ impl<'a> Iterator for Lines<'a> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
#[inline]
fn last(mut self) -> Option<&'a str> {
self.next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@ -4241,6 +4246,11 @@ impl<'a> Iterator for SplitWhitespace<'a> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
#[inline]
fn last(mut self) -> Option<&'a str> {
self.next_back()
}
}
#[stable(feature = "split_whitespace", since = "1.1.0")]
@ -4267,6 +4277,11 @@ impl<'a> Iterator for SplitAsciiWhitespace<'a> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
#[inline]
fn last(mut self) -> Option<&'a str> {
self.next_back()
}
}
#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]

View File

@ -3,7 +3,6 @@ mod float;
mod num;
#[test]
#[cfg(not(miri))] // Miri cannot print pointers
fn test_format_flags() {
// No residual flags left by pointer formatting
let p = "".as_ptr();
@ -13,7 +12,6 @@ fn test_format_flags() {
}
#[test]
#[cfg(not(miri))] // Miri cannot print pointers
fn test_pointer_formats_data_pointer() {
let b: &[u8] = b"";
let s: &str = "";

View File

@ -253,7 +253,6 @@ fn test_unsized_nonnull() {
#[test]
#[allow(warnings)]
#[cfg(not(miri))] // Miri cannot hash pointers
// Have a symbol for the test below. It doesn't need to be an actual variadic function, match the
// ABI, or even point to actual executable code, because the function itself is never invoked.
#[no_mangle]
@ -293,7 +292,7 @@ fn write_unaligned_drop() {
}
#[test]
#[cfg(not(miri))] // Miri cannot compute actual alignment of an allocation
#[cfg(not(miri))] // Miri does not compute a maximal `mid` for `align_offset`
fn align_offset_zst() {
// For pointers of stride = 0, the pointer is already aligned or it cannot be aligned at
// all, because no amount of elements will align the pointer.
@ -308,7 +307,7 @@ fn align_offset_zst() {
}
#[test]
#[cfg(not(miri))] // Miri cannot compute actual alignment of an allocation
#[cfg(not(miri))] // Miri does not compute a maximal `mid` for `align_offset`
fn align_offset_stride1() {
// For pointers of stride = 1, the pointer can always be aligned. The offset is equal to
// number of bytes.

View File

@ -1415,7 +1415,7 @@ pub mod memchr {
}
#[test]
#[cfg(not(miri))] // Miri cannot compute actual alignment of an allocation
#[cfg(not(miri))] // Miri does not compute a maximal `mid` for `align_offset`
fn test_align_to_simple() {
let bytes = [1u8, 2, 3, 4, 5, 6, 7];
let (prefix, aligned, suffix) = unsafe { bytes.align_to::<u16>() };
@ -1439,7 +1439,7 @@ fn test_align_to_zst() {
}
#[test]
#[cfg(not(miri))] // Miri cannot compute actual alignment of an allocation
#[cfg(not(miri))] // Miri does not compute a maximal `mid` for `align_offset`
fn test_align_to_non_trivial() {
#[repr(align(8))] struct U64(u64, u64);
#[repr(align(8))] struct U64U64U32(u64, u64, u32);

View File

@ -78,7 +78,7 @@ impl<T: Copy> Buffer<T> {
}
pub(super) fn take(&mut self) -> Self {
mem::replace(self, Self::default())
mem::take(self)
}
pub(super) fn extend_from_slice(&mut self, xs: &[T]) {

View File

@ -74,7 +74,7 @@ impl<T: LambdaL> ScopedCell<T> {
}
/// Sets the value in `self` to `value` while running `f`.
pub fn set<'a, R>(&self, value: <T as ApplyL<'a>>::Out, f: impl FnOnce() -> R) -> R {
pub fn set<R>(&self, value: <T as ApplyL<'_>>::Out, f: impl FnOnce() -> R) -> R {
self.replace(value, |_| f())
}
}

View File

@ -25,6 +25,7 @@
#![feature(extern_types)]
#![feature(in_band_lifetimes)]
#![feature(optin_builtin_traits)]
#![feature(mem_take)]
#![feature(non_exhaustive)]
#![feature(specialization)]

View File

@ -30,7 +30,7 @@ struct LoopScope {
break_index: CFGIndex, // where to go on a `break`
}
pub fn construct<'tcx>(tcx: TyCtxt<'tcx>, body: &hir::Body) -> CFG {
pub fn construct(tcx: TyCtxt<'_>, body: &hir::Body) -> CFG {
let mut graph = graph::Graph::new();
let entry = graph.add_node(CFGNodeData::Entry);

View File

@ -49,7 +49,7 @@ pub type CFGNode = graph::Node<CFGNodeData>;
pub type CFGEdge = graph::Edge<CFGEdgeData>;
impl CFG {
pub fn new<'tcx>(tcx: TyCtxt<'tcx>, body: &hir::Body) -> CFG {
pub fn new(tcx: TyCtxt<'_>, body: &hir::Body) -> CFG {
construct::construct(tcx, body)
}
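This hunk is one instance of #62039: a lifetime parameter that appears in exactly one place can be elided as `'_` without changing the signature the compiler infers. A self-contained sketch with an illustrative context type standing in for `TyCtxt`:

#[derive(Clone, Copy)]
struct Cx<'a> {
    krate_name: &'a str,
}

// Explicit form with a named lifetime that buys nothing:
fn describe_explicit<'a>(cx: Cx<'a>) -> String {
    format!("compiling {}", cx.krate_name)
}

// Elided form used throughout this rollup; the compiler infers the same signature.
fn describe(cx: Cx<'_>) -> String {
    format!("compiling {}", cx.krate_name)
}

fn main() {
    let cx = Cx { krate_name: "core" };
    assert_eq!(describe_explicit(cx), describe(cx));
}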

View File

@ -841,7 +841,7 @@ impl DepGraph {
//
// This method will only load queries that will end up in the disk cache.
// Other queries will not be executed.
pub fn exec_cache_promotions<'tcx>(&self, tcx: TyCtxt<'tcx>) {
pub fn exec_cache_promotions(&self, tcx: TyCtxt<'_>) {
let data = self.data.as_ref().unwrap();
for prev_index in data.colors.values.indices() {
match data.colors.get(prev_index) {

View File

@ -347,7 +347,7 @@ fn is_c_like_enum(item: &hir::Item) -> bool {
}
}
fn check_mod_attrs<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
fn check_mod_attrs(tcx: TyCtxt<'_>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut CheckAttrVisitor { tcx }.as_deep_visitor()

View File

@ -1280,8 +1280,8 @@ impl<'a> LoweringContext<'a> {
let was_in_loop_condition = self.is_in_loop_condition;
self.is_in_loop_condition = false;
let catch_scopes = mem::replace(&mut self.catch_scopes, Vec::new());
let loop_scopes = mem::replace(&mut self.loop_scopes, Vec::new());
let catch_scopes = mem::take(&mut self.catch_scopes);
let loop_scopes = mem::take(&mut self.loop_scopes);
let ret = f(self);
self.catch_scopes = catch_scopes;
self.loop_scopes = loop_scopes;

View File

@ -4,7 +4,7 @@ use crate::hir::itemlikevisit::ItemLikeVisitor;
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::{Lock, ParallelIterator, par_iter};
pub fn check_crate<'hir>(hir_map: &hir::map::Map<'hir>) {
pub fn check_crate(hir_map: &hir::map::Map<'_>) {
hir_map.dep_graph.assert_ignored();
let errors = Lock::new(Vec::new());

View File

@ -147,7 +147,7 @@ impl Forest {
}
}
pub fn krate<'hir>(&'hir self) -> &'hir Crate {
pub fn krate(&self) -> &Crate {
self.dep_graph.read(DepNode::new_no_params(DepKind::Krate));
&self.krate
}
@ -155,7 +155,7 @@ impl Forest {
/// This is used internally in the dependency tracking system.
/// Use the `krate` method to ensure your dependency on the
/// crate is tracked.
pub fn untracked_krate<'hir>(&'hir self) -> &'hir Crate {
pub fn untracked_krate(&self) -> &Crate {
&self.krate
}
}
@ -1085,7 +1085,7 @@ impl<'a> NodesMatchingSuffix<'a> {
// If `id` itself is a mod named `m` with parent `p`, then
// returns `Some(id, m, p)`. If `id` has no mod in its parent
// chain, then returns `None`.
fn find_first_mod_parent<'a>(map: &'a Map<'_>, mut id: HirId) -> Option<(HirId, Name)> {
fn find_first_mod_parent(map: &Map<'_>, mut id: HirId) -> Option<(HirId, Name)> {
loop {
if let Node::Item(item) = map.find(id)? {
if item_is_mod(&item) {

View File

@ -364,7 +364,7 @@ where
// been fully instantiated and hence the set of scopes we have
// doesn't matter -- just to be sure, put an empty vector
// in there.
let old_a_scopes = ::std::mem::replace(pair.vid_scopes(self), vec![]);
let old_a_scopes = ::std::mem::take(pair.vid_scopes(self));
// Relate the generalized kind to the original one.
let result = pair.relate_generalized_ty(self, generalized_ty);

View File

@ -112,7 +112,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
/// Trait queries just want to pass back type obligations "as is"
pub fn take_registered_region_obligations(&self) -> Vec<(hir::HirId, RegionObligation<'tcx>)> {
::std::mem::replace(&mut *self.region_obligations.borrow_mut(), vec![])
::std::mem::take(&mut *self.region_obligations.borrow_mut())
}
/// Process the region obligations that must be proven (during

View File

@ -455,7 +455,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
*any_unifications = false;
}
mem::replace(data, RegionConstraintData::default())
mem::take(data)
}
pub fn data(&self) -> &RegionConstraintData<'tcx> {

View File

@ -115,7 +115,7 @@ impl<'tcx> TypeVariableTable<'tcx> {
///
/// Note that this function does not care whether
/// `vid` has been unified with something else or not.
pub fn var_diverges<'a>(&'a self, vid: ty::TyVid) -> bool {
pub fn var_diverges(&self, vid: ty::TyVid) -> bool {
self.values.get(vid.index as usize).diverging
}

View File

@ -67,6 +67,7 @@
#![feature(crate_visibility_modifier)]
#![feature(proc_macro_hygiene)]
#![feature(log_syntax)]
#![feature(mem_take)]
#![recursion_limit="512"]

View File

@ -765,7 +765,7 @@ pub fn maybe_lint_level_root(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
attrs.iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some())
}
fn lint_levels<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> &'tcx LintLevelMap {
fn lint_levels(tcx: TyCtxt<'_>, cnum: CrateNum) -> &LintLevelMap {
assert_eq!(cnum, LOCAL_CRATE);
let mut builder = LintLevelMapBuilder {
levels: LintLevelSets::builder(tcx.sess),

View File

@ -211,7 +211,7 @@ pub trait CrateStore {
fn crates_untracked(&self) -> Vec<CrateNum>;
// utility functions
fn encode_metadata<'tcx>(&self, tcx: TyCtxt<'tcx>) -> EncodedMetadata;
fn encode_metadata(&self, tcx: TyCtxt<'_>) -> EncodedMetadata;
fn metadata_encoding_version(&self) -> &[u8];
}

View File

@ -26,7 +26,7 @@ use syntax_pos;
// explored. For example, if it's a live Node::Item that is a
// function, then we should explore its block to check for codes that
// may need to be marked as live.
fn should_explore<'tcx>(tcx: TyCtxt<'tcx>, hir_id: hir::HirId) -> bool {
fn should_explore(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
match tcx.hir().find(hir_id) {
Some(Node::Item(..)) |
Some(Node::ImplItem(..)) |
@ -662,7 +662,7 @@ impl Visitor<'tcx> for DeadVisitor<'tcx> {
}
}
pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn check_crate(tcx: TyCtxt<'_>) {
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
let krate = tcx.hir().krate();
let live_symbols = find_live(tcx, access_levels, krate);

View File

@ -81,7 +81,7 @@ pub enum Linkage {
Dynamic,
}
pub fn calculate<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn calculate(tcx: TyCtxt<'_>) {
let sess = &tcx.sess;
let fmts = sess.crate_types.borrow().iter().map(|&ty| {
let linkage = calculate_type(tcx, ty);
@ -92,7 +92,7 @@ pub fn calculate<'tcx>(tcx: TyCtxt<'tcx>) {
sess.dependency_formats.set(fmts);
}
fn calculate_type<'tcx>(tcx: TyCtxt<'tcx>, ty: config::CrateType) -> DependencyList {
fn calculate_type(tcx: TyCtxt<'_>, ty: config::CrateType) -> DependencyList {
let sess = &tcx.sess;
if !sess.opts.output_types.should_codegen() {
@ -267,7 +267,7 @@ fn add_library(
}
}
fn attempt_static<'tcx>(tcx: TyCtxt<'tcx>) -> Option<DependencyList> {
fn attempt_static(tcx: TyCtxt<'_>) -> Option<DependencyList> {
let sess = &tcx.sess;
let crates = cstore::used_crates(tcx, RequireStatic);
if !crates.iter().by_ref().all(|&(_, ref p)| p.is_some()) {
@ -324,7 +324,7 @@ fn activate_injected_dep(injected: Option<CrateNum>,
// After the linkage for a crate has been determined we need to verify that
// there's only going to be one allocator in the output.
fn verify_ok<'tcx>(tcx: TyCtxt<'tcx>, list: &[Linkage]) {
fn verify_ok(tcx: TyCtxt<'_>, list: &[Linkage]) {
let sess = &tcx.sess;
if list.len() == 0 {
return

View File

@ -10,7 +10,7 @@ use syntax_pos::{Span, sym};
use crate::hir::intravisit::{self, Visitor, NestedVisitorMap};
use crate::hir;
fn check_mod_intrinsics<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
fn check_mod_intrinsics(tcx: TyCtxt<'_>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut ItemVisitor { tcx }.as_deep_visitor()

View File

@ -142,7 +142,7 @@ impl Visitor<'tcx> for LibFeatureCollector<'tcx> {
}
}
pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> LibFeatures {
pub fn collect(tcx: TyCtxt<'_>) -> LibFeatures {
let mut collector = LibFeatureCollector::new(tcx);
intravisit::walk_crate(&mut collector, tcx.hir().krate());
collector.lib_features

View File

@ -181,7 +181,7 @@ impl<'tcx> Visitor<'tcx> for IrMaps<'tcx> {
fn visit_arm(&mut self, a: &'tcx hir::Arm) { visit_arm(self, a); }
}
fn check_mod_liveness<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
fn check_mod_liveness(tcx: TyCtxt<'_>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(
module_def_id,
&mut IrMaps::new(tcx, module_def_id).as_deep_visitor(),

View File

@ -42,8 +42,8 @@ fn item_might_be_inlined(tcx: TyCtxt<'tcx>, item: &hir::Item, attrs: CodegenFnAt
}
}
fn method_might_be_inlined<'tcx>(
tcx: TyCtxt<'tcx>,
fn method_might_be_inlined(
tcx: TyCtxt<'_>,
impl_item: &hir::ImplItem,
impl_src: DefId,
) -> bool {
@ -391,7 +391,7 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a, 'tcx
#[derive(Clone, HashStable)]
pub struct ReachableSet(pub Lrc<HirIdSet>);
fn reachable_set<'tcx>(tcx: TyCtxt<'tcx>, crate_num: CrateNum) -> ReachableSet {
fn reachable_set(tcx: TyCtxt<'_>, crate_num: CrateNum) -> ReachableSet {
debug_assert!(crate_num == LOCAL_CRATE);
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);

View File

@ -1375,7 +1375,7 @@ impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> {
let outer_ec = mem::replace(&mut self.expr_and_pat_count, 0);
let outer_cx = self.cx;
let outer_ts = mem::replace(&mut self.terminating_scopes, FxHashSet::default());
let outer_ts = mem::take(&mut self.terminating_scopes);
self.terminating_scopes.insert(body.value.hir_id.local_id);
if let Some(root_id) = self.cx.root_id {
@ -1446,7 +1446,7 @@ impl<'tcx> Visitor<'tcx> for RegionResolutionVisitor<'tcx> {
}
}
fn region_scope_tree<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx ScopeTree {
fn region_scope_tree(tcx: TyCtxt<'_>, def_id: DefId) -> &ScopeTree {
let closure_base_def_id = tcx.closure_base_def_id(def_id);
if closure_base_def_id != def_id {
return tcx.region_scope_tree(closure_base_def_id);

View File

@ -19,7 +19,7 @@ use errors::{Applicability, DiagnosticBuilder};
use rustc_macros::HashStable;
use std::borrow::Cow;
use std::cell::Cell;
use std::mem::replace;
use std::mem::{replace, take};
use syntax::ast;
use syntax::attr;
use syntax::symbol::{kw, sym};
@ -368,7 +368,7 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) {
/// entire crate. You should not read the result of this query
/// directly, but rather use `named_region_map`, `is_late_bound_map`,
/// etc.
fn resolve_lifetimes<'tcx>(tcx: TyCtxt<'tcx>, for_krate: CrateNum) -> &'tcx ResolveLifetimes {
fn resolve_lifetimes(tcx: TyCtxt<'_>, for_krate: CrateNum) -> &ResolveLifetimes {
assert_eq!(for_krate, LOCAL_CRATE);
let named_region_map = krate(tcx);
@ -395,7 +395,7 @@ fn resolve_lifetimes<'tcx>(tcx: TyCtxt<'tcx>, for_krate: CrateNum) -> &'tcx Reso
tcx.arena.alloc(rl)
}
fn krate<'tcx>(tcx: TyCtxt<'tcx>) -> NamedRegionMap {
fn krate(tcx: TyCtxt<'_>) -> NamedRegionMap {
let krate = tcx.hir().krate();
let mut map = NamedRegionMap {
defs: Default::default(),
@ -441,7 +441,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
fn visit_nested_body(&mut self, body: hir::BodyId) {
// Each body has their own set of labels, save labels.
let saved = replace(&mut self.labels_in_fn, vec![]);
let saved = take(&mut self.labels_in_fn);
let body = self.tcx.hir().body(body);
extract_labels(self, body);
self.with(
@ -1405,9 +1405,8 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
lifetime_uses,
..
} = self;
let labels_in_fn = replace(&mut self.labels_in_fn, vec![]);
let xcrate_object_lifetime_defaults =
replace(&mut self.xcrate_object_lifetime_defaults, DefIdMap::default());
let labels_in_fn = take(&mut self.labels_in_fn);
let xcrate_object_lifetime_defaults = take(&mut self.xcrate_object_lifetime_defaults);
let mut this = LifetimeContext {
tcx: *tcx,
map: map,

View File

@ -466,7 +466,7 @@ impl<'tcx> Index<'tcx> {
/// Cross-references the feature names of unstable APIs with enabled
/// features and possibly prints errors.
fn check_mod_unstable_api_usage<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
fn check_mod_unstable_api_usage(tcx: TyCtxt<'_>, module_def_id: DefId) {
tcx.hir().visit_item_likes_in_module(module_def_id, &mut Checker { tcx }.as_deep_visitor());
}
@ -836,7 +836,7 @@ impl<'tcx> TyCtxt<'tcx> {
/// Given the list of enabled features that were not language features (i.e., that
/// were expected to be library features), and the list of features used from
/// libraries, identify activated features that don't exist and error about them.
pub fn check_unused_or_stable_features<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn check_unused_or_stable_features(tcx: TyCtxt<'_>) {
let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
if tcx.stability().staged_api[&LOCAL_CRATE] {
@ -920,8 +920,8 @@ pub fn check_unused_or_stable_features<'tcx>(tcx: TyCtxt<'tcx>) {
// don't lint about unused features. We should reenable this one day!
}
fn unnecessary_stable_feature_lint<'tcx>(
tcx: TyCtxt<'tcx>,
fn unnecessary_stable_feature_lint(
tcx: TyCtxt<'_>,
span: Span,
feature: Symbol,
since: Symbol,

View File

@ -182,7 +182,7 @@ pub fn struct_error<'tcx>(tcx: TyCtxtAt<'tcx>, msg: &str) -> DiagnosticBuilder<'
up with a Rust-level backtrace of where the error occurred.
These should always be constructed by calling `.into()` on
an `InterpError`. In `librustc_mir::interpret`, we have the `err!`
/// macro for this
/// macro for this.
#[derive(Debug, Clone)]
pub struct InterpErrorInfo<'tcx> {
pub kind: InterpError<'tcx, u64>,

View File

@ -2867,19 +2867,19 @@ impl<'tcx> graph::WithStartNode for Body<'tcx> {
}
impl<'tcx> graph::WithPredecessors for Body<'tcx> {
fn predecessors<'graph>(
&'graph self,
fn predecessors(
&self,
node: Self::Node,
) -> <Self as GraphPredecessors<'graph>>::Iter {
) -> <Self as GraphPredecessors<'_>>::Iter {
self.predecessors_for(node).clone().into_iter()
}
}
impl<'tcx> graph::WithSuccessors for Body<'tcx> {
fn successors<'graph>(
&'graph self,
fn successors(
&self,
node: Self::Node,
) -> <Self as GraphSuccessors<'graph>>::Iter {
) -> <Self as GraphSuccessors<'_>>::Iter {
self.basic_blocks[node].terminator().successors().cloned()
}
}

View File

@ -269,11 +269,11 @@ impl OutputTypes {
self.0.contains_key(key)
}
pub fn keys<'a>(&'a self) -> BTreeMapKeysIter<'a, OutputType, Option<PathBuf>> {
pub fn keys(&self) -> BTreeMapKeysIter<'_, OutputType, Option<PathBuf>> {
self.0.keys()
}
pub fn values<'a>(&'a self) -> BTreeMapValuesIter<'a, OutputType, Option<PathBuf>> {
pub fn values(&self) -> BTreeMapValuesIter<'_, OutputType, Option<PathBuf>> {
self.0.values()
}
@ -316,7 +316,7 @@ impl Externs {
self.0.get(key)
}
pub fn iter<'a>(&'a self) -> BTreeMapIter<'a, String, ExternEntry> {
pub fn iter(&self) -> BTreeMapIter<'_, String, ExternEntry> {
self.0.iter()
}
}

View File

@ -215,66 +215,66 @@ impl Session {
*self.crate_disambiguator.get()
}
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(
&'a self,
pub fn struct_span_warn<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_warn(sp, msg)
}
pub fn struct_span_warn_with_code<'a, S: Into<MultiSpan>>(
&'a self,
pub fn struct_span_warn_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_warn_with_code(sp, msg, code)
}
pub fn struct_warn<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
pub fn struct_warn(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_warn(msg)
}
pub fn struct_span_err<'a, S: Into<MultiSpan>>(
&'a self,
pub fn struct_span_err<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_err(sp, msg)
}
pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(
&'a self,
pub fn struct_span_err_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_err_with_code(sp, msg, code)
}
// FIXME: This method should be removed (every error should have an associated error code).
pub fn struct_err<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
pub fn struct_err(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_err(msg)
}
pub fn struct_err_with_code<'a>(
&'a self,
pub fn struct_err_with_code(
&self,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_err_with_code(msg, code)
}
pub fn struct_span_fatal<'a, S: Into<MultiSpan>>(
&'a self,
pub fn struct_span_fatal<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_fatal(sp, msg)
}
pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(
&'a self,
pub fn struct_span_fatal_with_code<S: Into<MultiSpan>>(
&self,
sp: S,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_span_fatal_with_code(sp, msg, code)
}
pub fn struct_fatal<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
pub fn struct_fatal(&self, msg: &str) -> DiagnosticBuilder<'_> {
self.diagnostic().struct_fatal(msg)
}
@ -416,7 +416,7 @@ impl Session {
pub fn next_node_id(&self) -> NodeId {
self.reserve_node_ids(1)
}
pub fn diagnostic<'a>(&'a self) -> &'a errors::Handler {
pub fn diagnostic(&self) -> &errors::Handler {
&self.parse_sess.span_diagnostic
}
@ -504,7 +504,7 @@ impl Session {
);
}
pub fn source_map<'a>(&'a self) -> &'a source_map::SourceMap {
pub fn source_map(&self) -> &source_map::SourceMap {
self.parse_sess.source_map()
}
pub fn verbose(&self) -> bool {

View File

@ -48,8 +48,8 @@ pub fn add_placeholder_note(err: &mut errors::DiagnosticBuilder<'_>) {
/// If there are types that satisfy both impls, invokes `on_overlap`
/// with a suitably-freshened `ImplHeader` with those types
/// substituted. Otherwise, invokes `no_overlap`.
pub fn overlapping_impls<'tcx, F1, F2, R>(
tcx: TyCtxt<'tcx>,
pub fn overlapping_impls<F1, F2, R>(
tcx: TyCtxt<'_>,
impl1_def_id: DefId,
impl2_def_id: DefId,
intercrate_mode: IntercrateMode,
@ -247,10 +247,10 @@ pub enum OrphanCheckErr<'tcx> {
///
/// 1. All type parameters in `Self` must be "covered" by some local type constructor.
/// 2. Some local type must appear in `Self`.
pub fn orphan_check<'tcx>(
tcx: TyCtxt<'tcx>,
pub fn orphan_check(
tcx: TyCtxt<'_>,
impl_def_id: DefId,
) -> Result<(), OrphanCheckErr<'tcx>> {
) -> Result<(), OrphanCheckErr<'_>> {
debug!("orphan_check({:?})", impl_def_id);
// We only expect this routine to be invoked on implementations

View File

@ -247,7 +247,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
fn fuzzy_match_tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
/// returns the fuzzy category of a given type, or None
/// if the type can be equated to any type.
fn type_category<'tcx>(t: Ty<'tcx>) -> Option<u32> {
fn type_category(t: Ty<'_>) -> Option<u32> {
match t.sty {
ty::Bool => Some(0),
ty::Char => Some(1),

View File

@ -702,6 +702,6 @@ impl<'tcx> TyCtxt<'tcx> {
}
}
pub(super) fn is_object_safe_provider<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> bool {
pub(super) fn is_object_safe_provider(tcx: TyCtxt<'_>, trait_def_id: DefId) -> bool {
tcx.object_safety_violations(trait_def_id).is_empty()
}

View File

@ -1509,8 +1509,8 @@ fn confirm_impl_candidate<'cx, 'tcx>(
///
/// Based on the "projection mode", this lookup may in fact only examine the
/// topmost impl. See the comments for `Reveal` for more details.
fn assoc_ty_def<'cx, 'tcx>(
selcx: &SelectionContext<'cx, 'tcx>,
fn assoc_ty_def(
selcx: &SelectionContext<'_, '_>,
impl_def_id: DefId,
assoc_ty_def_id: DefId,
) -> specialization_graph::NodeItem<ty::AssocItem> {

View File

@ -145,8 +145,8 @@ pub fn find_associated_item<'tcx>(
/// Specialization is determined by the sets of types to which the impls apply;
/// `impl1` specializes `impl2` if it applies to a subset of the types `impl2` applies
/// to.
pub(super) fn specializes<'tcx>(
tcx: TyCtxt<'tcx>,
pub(super) fn specializes(
tcx: TyCtxt<'_>,
(impl1_def_id, impl2_def_id): (DefId, DefId),
) -> bool {
debug!("specializes({:?}, {:?})", impl1_def_id, impl2_def_id);
@ -282,10 +282,10 @@ fn fulfill_implication<'a, 'tcx>(
}
// Query provider for `specialization_graph_of`.
pub(super) fn specialization_graph_provider<'tcx>(
tcx: TyCtxt<'tcx>,
pub(super) fn specialization_graph_provider(
tcx: TyCtxt<'_>,
trait_id: DefId,
) -> &'tcx specialization_graph::Graph {
) -> &specialization_graph::Graph {
let mut sg = specialization_graph::Graph::new();
let mut trait_impls = tcx.all_impls(trait_id);

View File

@ -417,7 +417,7 @@ pub struct SupertraitDefIds<'tcx> {
visited: FxHashSet<DefId>,
}
pub fn supertrait_def_ids<'tcx>(tcx: TyCtxt<'tcx>, trait_def_id: DefId) -> SupertraitDefIds<'tcx> {
pub fn supertrait_def_ids(tcx: TyCtxt<'_>, trait_def_id: DefId) -> SupertraitDefIds<'_> {
SupertraitDefIds {
tcx,
stack: vec![trait_def_id],

View File

@ -306,9 +306,9 @@ impl<'sess> OnDiskCache<'sess> {
}
/// Loads a diagnostic emitted during the previous compilation session.
pub fn load_diagnostics<'tcx>(
pub fn load_diagnostics(
&self,
tcx: TyCtxt<'tcx>,
tcx: TyCtxt<'_>,
dep_node_index: SerializedDepNodeIndex,
) -> Vec<Diagnostic> {
let diagnostics: Option<EncodedDiagnostics> = self.load_indexed(
@ -335,9 +335,9 @@ impl<'sess> OnDiskCache<'sess> {
/// Returns the cached query result if there is something in the cache for
/// the given `SerializedDepNodeIndex`; otherwise returns `None`.
pub fn try_load_query_result<'tcx, T>(
pub fn try_load_query_result<T>(
&self,
tcx: TyCtxt<'tcx>,
tcx: TyCtxt<'_>,
dep_node_index: SerializedDepNodeIndex,
) -> Option<T>
where

View File

@ -1166,7 +1166,7 @@ macro_rules! define_provider_struct {
/// then `force_from_dep_node()` should not fail for it. Otherwise, you can just
/// add it to the "We don't have enough information to reconstruct..." group in
/// the match below.
pub fn force_from_dep_node<'tcx>(tcx: TyCtxt<'tcx>, dep_node: &DepNode) -> bool {
pub fn force_from_dep_node(tcx: TyCtxt<'_>, dep_node: &DepNode) -> bool {
use crate::dep_graph::RecoverKey;
// We must avoid ever having to call force_from_dep_node() for a

View File

@ -354,7 +354,7 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
cmt: &mc::cmt_<'tcx>,
loan_region: ty::Region<'tcx>,
borrow_span: Span) {
pub fn borrow_of_local_data<'tcx>(cmt: &mc::cmt_<'tcx>) -> bool {
pub fn borrow_of_local_data(cmt: &mc::cmt_<'_>) -> bool {
match cmt.cat {
// Borrows of static items are allowed
Categorization::StaticItem => false,

View File

@ -53,7 +53,7 @@ pub struct LoanDataFlowOperator;
pub type LoanDataFlow<'tcx> = DataFlowContext<'tcx, LoanDataFlowOperator>;
pub fn check_crate<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn check_crate(tcx: TyCtxt<'_>) {
tcx.par_body_owners(|body_owner_def_id| {
tcx.ensure().borrowck(body_owner_def_id);
});
@ -73,7 +73,7 @@ pub struct AnalysisData<'tcx> {
pub move_data: move_data::FlowedMoveData<'tcx>,
}
fn borrowck<'tcx>(tcx: TyCtxt<'tcx>, owner_def_id: DefId) -> &'tcx BorrowCheckResult {
fn borrowck(tcx: TyCtxt<'_>, owner_def_id: DefId) -> &BorrowCheckResult {
assert!(tcx.use_ast_borrowck() || tcx.migrate_borrowck());
debug!("borrowck(body_owner_def_id={:?})", owner_def_id);

View File

@ -84,9 +84,9 @@ struct PropagationContext<'a, 'tcx, O> {
changed: bool,
}
fn get_cfg_indices<'a>(id: hir::ItemLocalId,
index: &'a FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>)
-> &'a [CFGIndex] {
fn get_cfg_indices(id: hir::ItemLocalId,
index: &FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>)
-> &[CFGIndex] {
index.get(&id).map_or(&[], |v| &v[..])
}

View File

@ -205,8 +205,8 @@ impl<'a> LlvmArchiveBuilder<'a> {
}
fn build_with_llvm(&mut self, kind: ArchiveKind) -> io::Result<()> {
let removals = mem::replace(&mut self.removals, Vec::new());
let mut additions = mem::replace(&mut self.additions, Vec::new());
let removals = mem::take(&mut self.removals);
let mut additions = mem::take(&mut self.additions);
let mut strings = Vec::new();
let mut members = Vec::new();

View File

@ -239,9 +239,9 @@ impl<'a> Drop for DiagnosticHandlers<'a> {
}
}
unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<LlvmCodegenBackend>,
msg: &'b str,
cookie: c_uint) {
unsafe extern "C" fn report_inline_asm(cgcx: &CodegenContext<LlvmCodegenBackend>,
msg: &str,
cookie: c_uint) {
cgcx.diag_emitter.inline_asm_error(cookie as u32, msg.to_owned());
}

View File

@ -123,8 +123,8 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'tcx>, cgu_name: InternedString) {
submit_codegened_module_to_llvm(&LlvmCodegenBackend(()), tcx, module, cost);
fn module_codegen<'tcx>(
tcx: TyCtxt<'tcx>,
fn module_codegen(
tcx: TyCtxt<'_>,
cgu_name: InternedString,
) -> ModuleCodegen<ModuleLlvm> {
let cgu = tcx.codegen_unit(cgu_name);

View File

@ -144,7 +144,7 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
}
}
fn build_sibling_block<'b>(&self, name: &'b str) -> Self {
fn build_sibling_block(&self, name: &str) -> Self {
Builder::new_block(self.cx, self.llfn(), name)
}

View File

@ -21,6 +21,7 @@
#![feature(link_args)]
#![feature(static_nobundle)]
#![feature(trusted_len)]
#![feature(mem_take)]
#![deny(rust_2018_idioms)]
#![deny(internal)]
#![deny(unused_lifetimes)]
@ -124,7 +125,7 @@ impl ExtraBackendMethods for LlvmCodegenBackend {
) {
unsafe { allocator::codegen(tcx, mods, kind) }
}
fn compile_codegen_unit<'tcx>(&self, tcx: TyCtxt<'tcx>, cgu_name: InternedString) {
fn compile_codegen_unit(&self, tcx: TyCtxt<'_>, cgu_name: InternedString) {
base::compile_codegen_unit(tcx, cgu_name);
}
fn target_machine_factory(

View File

@ -110,7 +110,7 @@ impl Command {
}
pub fn take_args(&mut self) -> Vec<OsString> {
mem::replace(&mut self.args, Vec::new())
mem::take(&mut self.args)
}
/// Returns `true` if we're pretty sure that this'll blow OS spawn limits,

View File

@ -46,10 +46,10 @@ pub fn crates_export_threshold(crate_types: &[config::CrateType]) -> SymbolExpor
}
}
fn reachable_non_generics_provider<'tcx>(
tcx: TyCtxt<'tcx>,
fn reachable_non_generics_provider(
tcx: TyCtxt<'_>,
cnum: CrateNum,
) -> &'tcx DefIdMap<SymbolExportLevel> {
) -> &DefIdMap<SymbolExportLevel> {
assert_eq!(cnum, LOCAL_CRATE);
if !tcx.sess.opts.output_types.should_codegen() {
@ -157,7 +157,7 @@ fn reachable_non_generics_provider<'tcx>(
tcx.arena.alloc(reachable_non_generics)
}
fn is_reachable_non_generic_provider_local<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
fn is_reachable_non_generic_provider_local(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
let export_threshold = threshold(tcx);
if let Some(&level) = tcx.reachable_non_generics(def_id.krate).get(&def_id) {
@ -167,14 +167,14 @@ fn is_reachable_non_generic_provider_local<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefI
}
}
fn is_reachable_non_generic_provider_extern<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> bool {
fn is_reachable_non_generic_provider_extern(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
tcx.reachable_non_generics(def_id.krate).contains_key(&def_id)
}
fn exported_symbols_provider_local<'tcx>(
tcx: TyCtxt<'tcx>,
fn exported_symbols_provider_local(
tcx: TyCtxt<'_>,
cnum: CrateNum,
) -> Arc<Vec<(ExportedSymbol<'tcx>, SymbolExportLevel)>> {
) -> Arc<Vec<(ExportedSymbol<'_>, SymbolExportLevel)>> {
assert_eq!(cnum, LOCAL_CRATE);
if !tcx.sess.opts.output_types.should_codegen() {
@ -273,10 +273,10 @@ fn exported_symbols_provider_local<'tcx>(
Arc::new(symbols)
}
fn upstream_monomorphizations_provider<'tcx>(
tcx: TyCtxt<'tcx>,
fn upstream_monomorphizations_provider(
tcx: TyCtxt<'_>,
cnum: CrateNum,
) -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
) -> &DefIdMap<FxHashMap<SubstsRef<'_>, CrateNum>> {
debug_assert!(cnum == LOCAL_CRATE);
let cnums = tcx.all_crate_nums(LOCAL_CRATE);
@ -322,10 +322,10 @@ fn upstream_monomorphizations_provider<'tcx>(
tcx.arena.alloc(instances)
}
fn upstream_monomorphizations_for_provider<'tcx>(
tcx: TyCtxt<'tcx>,
fn upstream_monomorphizations_for_provider(
tcx: TyCtxt<'_>,
def_id: DefId,
) -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>> {
) -> Option<&FxHashMap<SubstsRef<'_>, CrateNum>> {
debug_assert!(!def_id.is_local());
tcx.upstream_monomorphizations(LOCAL_CRATE).get(&def_id)
}

View File

@ -1345,12 +1345,9 @@ fn start_executing_work<B: ExtraBackendMethods>(
assert!(!started_lto);
started_lto = true;
let needs_fat_lto =
mem::replace(&mut needs_fat_lto, Vec::new());
let needs_thin_lto =
mem::replace(&mut needs_thin_lto, Vec::new());
let import_only_modules =
mem::replace(&mut lto_import_only_modules, Vec::new());
let needs_fat_lto = mem::take(&mut needs_fat_lto);
let needs_thin_lto = mem::take(&mut needs_thin_lto);
let import_only_modules = mem::take(&mut lto_import_only_modules);
for (work, cost) in generate_lto_work(&cgcx, needs_fat_lto,
needs_thin_lto, import_only_modules) {

View File

@ -700,7 +700,7 @@ impl<B: ExtraBackendMethods> Drop for AbortCodegenOnDrop<B> {
}
}
fn assert_and_save_dep_graph<'tcx>(tcx: TyCtxt<'tcx>) {
fn assert_and_save_dep_graph(tcx: TyCtxt<'_>) {
time(tcx.sess,
"assert dep graph",
|| ::rustc_incremental::assert_dep_graph(tcx));

View File

@ -10,7 +10,7 @@ pub enum FunctionDebugContext<D> {
}
impl<D> FunctionDebugContext<D> {
pub fn get_ref<'a>(&'a self, span: Span) -> &'a FunctionDebugContextData<D> {
pub fn get_ref(&self, span: Span) -> &FunctionDebugContextData<D> {
match *self {
FunctionDebugContext::RegularContext(ref data) => data,
FunctionDebugContext::DebugInfoDisabled => {

View File

@ -10,6 +10,7 @@
#![feature(in_band_lifetimes)]
#![feature(nll)]
#![feature(trusted_len)]
#![feature(mem_take)]
#![allow(unused_attributes)]
#![allow(dead_code)]
#![deny(rust_2018_idioms)]

View File

@ -273,7 +273,7 @@ impl CleanupKind {
}
}
pub fn cleanup_kinds<'tcx>(mir: &mir::Body<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKind> {
fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
mir: &mir::Body<'tcx>) {
for (bb, data) in mir.basic_blocks().iter_enumerated() {

View File

@ -44,7 +44,7 @@ pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Se
mods: &mut Self::Module,
kind: AllocatorKind,
);
fn compile_codegen_unit<'tcx>(&self, tcx: TyCtxt<'tcx>, cgu_name: InternedString);
fn compile_codegen_unit(&self, tcx: TyCtxt<'_>, cgu_name: InternedString);
// If find_features is true this won't access `sess.crate_types` by assuming
// that `is_pie_binary` is false. When we discover LLVM target features
// `sess.crate_types` is uninitialized so we cannot access it.

View File

@ -36,7 +36,7 @@ pub trait BuilderMethods<'a, 'tcx>:
{
fn new_block<'b>(cx: &'a Self::CodegenCx, llfn: Self::Value, name: &'b str) -> Self;
fn with_cx(cx: &'a Self::CodegenCx) -> Self;
fn build_sibling_block<'b>(&self, name: &'b str) -> Self;
fn build_sibling_block(&self, name: &str) -> Self;
fn cx(&self) -> &Self::CodegenCx;
fn llbb(&self) -> Self::BasicBlock;

View File

@ -11,7 +11,7 @@ use syntax::symbol::{Symbol, sym};
const SYMBOL_NAME: Symbol = sym::rustc_symbol_name;
const DEF_PATH: Symbol = sym::rustc_def_path;
pub fn report_symbol_names<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn report_symbol_names(tcx: TyCtxt<'_>) {
// if the `rustc_attrs` feature is not enabled, then the
// attributes we are interested in cannot be present anyway, so
// skip the walk.

View File

@ -168,7 +168,7 @@ impl<T: Idx> BitSet<T> {
/// Iterates over the indices of set bits in a sorted order.
#[inline]
pub fn iter<'a>(&'a self) -> BitIter<'a, T> {
pub fn iter(&self) -> BitIter<'_, T> {
BitIter {
cur: None,
iter: self.words.iter().enumerate(),
@ -849,7 +849,7 @@ impl<R: Idx, C: Idx> BitMatrix<R, C> {
/// Iterates through all the columns set to true in a given row of
/// the matrix.
pub fn iter<'a>(&'a self, row: R) -> BitIter<'a, C> {
pub fn iter(&self, row: R) -> BitIter<'_, C> {
assert!(row.index() < self.num_rows);
let (start, end) = self.range(row);
BitIter {

View File

@ -58,7 +58,7 @@ impl Fingerprint {
Ok(())
}
pub fn decode_opaque<'a>(decoder: &mut Decoder<'a>) -> Result<Fingerprint, String> {
pub fn decode_opaque(decoder: &mut Decoder<'_>) -> Result<Fingerprint, String> {
let mut bytes = [0; 16];
decoder.read_raw_bytes(&mut bytes)?;

View File

@ -247,11 +247,11 @@ impl<N: Debug, E: Debug> Graph<N, E> {
self.incoming_edges(target).sources()
}
pub fn depth_traverse<'a>(
&'a self,
pub fn depth_traverse(
&self,
start: NodeIndex,
direction: Direction,
) -> DepthFirstTraversal<'a, N, E> {
) -> DepthFirstTraversal<'_, N, E> {
DepthFirstTraversal::with_start_node(self, start, direction)
}

View File

@ -26,10 +26,10 @@ pub trait WithSuccessors: DirectedGraph
where
Self: for<'graph> GraphSuccessors<'graph, Item = <Self as DirectedGraph>::Node>,
{
fn successors<'graph>(
&'graph self,
fn successors(
&self,
node: Self::Node,
) -> <Self as GraphSuccessors<'graph>>::Iter;
) -> <Self as GraphSuccessors<'_>>::Iter;
fn depth_first_search(&self, from: Self::Node) -> iterate::DepthFirstSearch<'_, Self>
where
@ -48,10 +48,10 @@ pub trait WithPredecessors: DirectedGraph
where
Self: for<'graph> GraphPredecessors<'graph, Item = <Self as DirectedGraph>::Node>,
{
fn predecessors<'graph>(
&'graph self,
fn predecessors(
&self,
node: Self::Node,
) -> <Self as GraphPredecessors<'graph>>::Iter;
) -> <Self as GraphPredecessors<'_>>::Iter;
}
pub trait GraphPredecessors<'graph> {

View File

@ -17,15 +17,15 @@ impl<'graph, G: WithStartNode> WithStartNode for &'graph G {
}
impl<'graph, G: WithSuccessors> WithSuccessors for &'graph G {
fn successors<'iter>(&'iter self, node: Self::Node) -> <Self as GraphSuccessors<'iter>>::Iter {
fn successors(&self, node: Self::Node) -> <Self as GraphSuccessors<'_>>::Iter {
(**self).successors(node)
}
}
impl<'graph, G: WithPredecessors> WithPredecessors for &'graph G {
fn predecessors<'iter>(&'iter self,
node: Self::Node)
-> <Self as GraphPredecessors<'iter>>::Iter {
fn predecessors(&self,
node: Self::Node)
-> <Self as GraphPredecessors<'_>>::Iter {
(**self).predecessors(node)
}
}

View File

@ -51,15 +51,15 @@ impl WithNumNodes for TestGraph {
}
impl WithPredecessors for TestGraph {
fn predecessors<'graph>(&'graph self,
node: usize)
-> <Self as GraphPredecessors<'graph>>::Iter {
fn predecessors(&self,
node: usize)
-> <Self as GraphPredecessors<'_>>::Iter {
self.predecessors[&node].iter().cloned()
}
}
impl WithSuccessors for TestGraph {
fn successors<'graph>(&'graph self, node: usize) -> <Self as GraphSuccessors<'graph>>::Iter {
fn successors(&self, node: usize) -> <Self as GraphSuccessors<'_>>::Iter {
self.successors[&node].iter().cloned()
}
}

View File

@ -188,7 +188,7 @@ impl PpSourceMode {
_ => panic!("Should use call_with_pp_support_hir"),
}
}
fn call_with_pp_support_hir<'tcx, A, F>(&self, tcx: TyCtxt<'tcx>, f: F) -> A
fn call_with_pp_support_hir<A, F>(&self, tcx: TyCtxt<'_>, f: F) -> A
where
F: FnOnce(&dyn HirPrinterSupport<'_>, &hir::Crate) -> A,
{
@ -228,7 +228,7 @@ impl PpSourceMode {
trait PrinterSupport: pprust::PpAnn {
/// Provides a uniform interface for re-extracting a reference to a
/// `Session` from a value that now owns it.
fn sess<'a>(&'a self) -> &'a Session;
fn sess(&self) -> &Session;
/// Produces the pretty-print annotation object.
///
@ -240,7 +240,7 @@ trait PrinterSupport: pprust::PpAnn {
trait HirPrinterSupport<'hir>: pprust_hir::PpAnn {
/// Provides a uniform interface for re-extracting a reference to a
/// `Session` from a value that now owns it.
fn sess<'a>(&'a self) -> &'a Session;
fn sess(&self) -> &Session;
/// Provides a uniform interface for re-extracting a reference to an
/// `hir_map::Map` from a value that now owns it.
@ -272,7 +272,7 @@ struct NoAnn<'hir> {
}
impl<'hir> PrinterSupport for NoAnn<'hir> {
fn sess<'a>(&'a self) -> &'a Session {
fn sess(&self) -> &Session {
self.sess
}
@ -282,7 +282,7 @@ impl<'hir> PrinterSupport for NoAnn<'hir> {
}
impl<'hir> HirPrinterSupport<'hir> for NoAnn<'hir> {
fn sess<'a>(&'a self) -> &'a Session {
fn sess(&self) -> &Session {
self.sess
}
@ -313,7 +313,7 @@ struct IdentifiedAnnotation<'hir> {
}
impl<'hir> PrinterSupport for IdentifiedAnnotation<'hir> {
fn sess<'a>(&'a self) -> &'a Session {
fn sess(&self) -> &Session {
self.sess
}
@ -360,7 +360,7 @@ impl<'hir> pprust::PpAnn for IdentifiedAnnotation<'hir> {
}
impl<'hir> HirPrinterSupport<'hir> for IdentifiedAnnotation<'hir> {
fn sess<'a>(&'a self) -> &'a Session {
fn sess(&self) -> &Session {
self.sess
}
@ -458,7 +458,7 @@ struct TypedAnnotation<'a, 'tcx> {
}
impl<'b, 'tcx> HirPrinterSupport<'tcx> for TypedAnnotation<'b, 'tcx> {
fn sess<'a>(&'a self) -> &'a Session {
fn sess(&self) -> &Session {
&self.tcx.sess
}
@ -866,8 +866,8 @@ pub fn print_after_hir_lowering<'tcx>(
// analysis is performed. However, we want to call `phase_3_run_analysis_passes`
// with a different callback than the standard driver, so that isn't easy.
// Instead, we call that function ourselves.
fn print_with_analysis<'tcx>(
tcx: TyCtxt<'tcx>,
fn print_with_analysis(
tcx: TyCtxt<'_>,
ppm: PpMode,
uii: Option<UserIdentifiedItem>,
ofile: Option<&Path>,

View File

@ -1635,7 +1635,7 @@ impl Destination {
}
}
fn writable<'a>(&'a mut self) -> WritableDst<'a> {
fn writable(&mut self) -> WritableDst<'_> {
match *self {
Destination::Terminal(ref mut t) => WritableDst::Terminal(t),
Destination::Buffered(ref mut t) => {

View File

@ -438,14 +438,14 @@ impl Handler {
self.err_count.store(0, SeqCst);
}
pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> {
pub fn struct_dummy(&self) -> DiagnosticBuilder<'_> {
DiagnosticBuilder::new(self, Level::Cancelled, "")
}
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
pub fn struct_span_warn<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
result.set_span(sp);
if !self.flags.can_emit_warnings {
@ -453,11 +453,11 @@ impl Handler {
}
result
}
pub fn struct_span_warn_with_code<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str,
code: DiagnosticId)
-> DiagnosticBuilder<'a> {
pub fn struct_span_warn_with_code<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str,
code: DiagnosticId)
-> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
result.set_span(sp);
result.code(code);
@ -466,63 +466,63 @@ impl Handler {
}
result
}
pub fn struct_warn<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
pub fn struct_warn(&self, msg: &str) -> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Warning, msg);
if !self.flags.can_emit_warnings {
result.cancel();
}
result
}
pub fn struct_span_err<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
pub fn struct_span_err<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result
}
pub fn struct_span_err_with_code<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str,
code: DiagnosticId)
-> DiagnosticBuilder<'a> {
pub fn struct_span_err_with_code<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str,
code: DiagnosticId)
-> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result.code(code);
result
}
// FIXME: This method should be removed (every error should have an associated error code).
pub fn struct_err<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
pub fn struct_err(&self, msg: &str) -> DiagnosticBuilder<'_> {
DiagnosticBuilder::new(self, Level::Error, msg)
}
pub fn struct_err_with_code<'a>(
&'a self,
pub fn struct_err_with_code(
&self,
msg: &str,
code: DiagnosticId,
) -> DiagnosticBuilder<'a> {
) -> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.code(code);
result
}
pub fn struct_span_fatal<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
pub fn struct_span_fatal<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
result.set_span(sp);
result
}
pub fn struct_span_fatal_with_code<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str,
code: DiagnosticId)
-> DiagnosticBuilder<'a> {
pub fn struct_span_fatal_with_code<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str,
code: DiagnosticId)
-> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg);
result.set_span(sp);
result.code(code);
result
}
pub fn struct_fatal<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> {
pub fn struct_fatal(&self, msg: &str) -> DiagnosticBuilder<'_> {
DiagnosticBuilder::new(self, Level::Fatal, msg)
}
@ -563,10 +563,10 @@ impl Handler {
pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Error);
}
pub fn mut_span_err<'a, S: Into<MultiSpan>>(&'a self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'a> {
pub fn mut_span_err<S: Into<MultiSpan>>(&self,
sp: S,
msg: &str)
-> DiagnosticBuilder<'_> {
let mut result = DiagnosticBuilder::new(self, Level::Error, msg);
result.set_span(sp);
result
@ -605,10 +605,10 @@ impl Handler {
pub fn span_note_without_error<S: Into<MultiSpan>>(&self, sp: S, msg: &str) {
self.emit(&sp.into(), msg, Note);
}
pub fn span_note_diag<'a>(&'a self,
sp: Span,
msg: &str)
-> DiagnosticBuilder<'a> {
pub fn span_note_diag(&self,
sp: Span,
msg: &str)
-> DiagnosticBuilder<'_> {
let mut db = DiagnosticBuilder::new(self, Note, msg);
db.set_span(sp);
db

View File

@ -51,7 +51,7 @@ use std::io::Write;
use syntax::ast;
use syntax_pos::Span;
pub fn assert_dep_graph<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn assert_dep_graph(tcx: TyCtxt<'_>) {
tcx.dep_graph.with_ignore(|| {
if tcx.sess.opts.debugging_opts.dump_dep_graph {
dump_graph(tcx);

View File

@ -35,7 +35,7 @@ const MODULE: Symbol = sym::module;
const CFG: Symbol = sym::cfg;
const KIND: Symbol = sym::kind;
pub fn assert_module_sources<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn assert_module_sources(tcx: TyCtxt<'_>) {
tcx.dep_graph.with_ignore(|| {
if tcx.sess.opts.incremental.is_none() {
return;

View File

@ -206,7 +206,7 @@ impl Assertion {
}
}
pub fn check_dirty_clean_annotations<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn check_dirty_clean_annotations(tcx: TyCtxt<'_>) {
// can't add `#[rustc_dirty]` etc without opting in to this feature
if !tcx.features().rustc_attrs {
return;

View File

@ -15,7 +15,7 @@ use super::fs::*;
use super::file_format;
use super::work_product;
pub fn dep_graph_tcx_init<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn dep_graph_tcx_init(tcx: TyCtxt<'_>) {
if !tcx.dep_graph.is_fully_enabled() {
return
}
@ -192,7 +192,7 @@ pub fn load_dep_graph(sess: &Session) -> DepGraphFuture {
}))
}
pub fn load_query_result_cache<'sess>(sess: &'sess Session) -> OnDiskCache<'sess> {
pub fn load_query_result_cache(sess: &Session) -> OnDiskCache<'_> {
if sess.opts.incremental.is_none() ||
!sess.opts.debugging_opts.incremental_queries {
return OnDiskCache::new_empty(sess.source_map());

View File

@ -15,7 +15,7 @@ use super::dirty_clean;
use super::file_format;
use super::work_product;
pub fn save_dep_graph<'tcx>(tcx: TyCtxt<'tcx>) {
pub fn save_dep_graph(tcx: TyCtxt<'_>) {
debug!("save_dep_graph()");
tcx.dep_graph.with_ignore(|| {
let sess = tcx.sess;

View File

@ -878,7 +878,7 @@ pub fn create_global_ctxt(
/// Runs the resolution, type-checking, region checking and other
/// miscellaneous analysis passes on the crate.
fn analysis<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> Result<()> {
fn analysis(tcx: TyCtxt<'_>, cnum: CrateNum) -> Result<()> {
assert_eq!(cnum, LOCAL_CRATE);
let sess = tcx.sess;
@ -995,8 +995,8 @@ fn analysis<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> Result<()> {
Ok(())
}
fn encode_and_write_metadata<'tcx>(
tcx: TyCtxt<'tcx>,
fn encode_and_write_metadata(
tcx: TyCtxt<'_>,
outputs: &OutputFilenames,
) -> (middle::cstore::EncodedMetadata, bool) {
#[derive(PartialEq, Eq, PartialOrd, Ord)]

View File

@ -6,11 +6,11 @@ use rustc::ty::query::Providers;
use syntax::attr;
use syntax::symbol::sym;
pub fn find<'tcx>(tcx: TyCtxt<'tcx>) -> Option<DefId> {
pub fn find(tcx: TyCtxt<'_>) -> Option<DefId> {
tcx.proc_macro_decls_static(LOCAL_CRATE)
}
fn proc_macro_decls_static<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> Option<DefId> {
fn proc_macro_decls_static(tcx: TyCtxt<'_>, cnum: CrateNum) -> Option<DefId> {
assert_eq!(cnum, LOCAL_CRATE);
let mut finder = Finder { decls: None };

View File

@ -74,7 +74,7 @@ pub fn provide(providers: &mut Providers<'_>) {
};
}
fn lint_mod<'tcx>(tcx: TyCtxt<'tcx>, module_def_id: DefId) {
fn lint_mod(tcx: TyCtxt<'_>, module_def_id: DefId) {
lint::late_lint_mod(tcx, module_def_id, BuiltinCombinedModuleLateLintPass::new());
}

View File

@ -250,7 +250,7 @@ provide! { <'tcx> tcx, def_id, other, cdata,
exported_symbols => { Arc::new(cdata.exported_symbols(tcx)) }
}
pub fn provide<'tcx>(providers: &mut Providers<'tcx>) {
pub fn provide(providers: &mut Providers<'_>) {
// FIXME(#44234) - almost all of these queries have no sub-queries and
// therefore no actual inputs, they're just reading tables calculated in
// resolve! Does this work? Unsure! That's what the issue is about
@ -550,7 +550,7 @@ impl CrateStore for cstore::CStore {
self.do_postorder_cnums_untracked()
}
fn encode_metadata<'tcx>(&self, tcx: TyCtxt<'tcx>) -> EncodedMetadata {
fn encode_metadata(&self, tcx: TyCtxt<'_>) -> EncodedMetadata {
encoder::encode_metadata(tcx)
}

View File

@ -1863,7 +1863,7 @@ impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'tcx> {
// will allow us to slice the metadata to the precise length that we just
// generated regardless of trailing bytes that end up in it.
pub fn encode_metadata<'tcx>(tcx: TyCtxt<'tcx>) -> EncodedMetadata {
pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata {
let mut encoder = opaque::Encoder::new(vec![]);
encoder.emit_raw_bytes(METADATA_HEADER);
@ -1905,7 +1905,7 @@ pub fn encode_metadata<'tcx>(tcx: TyCtxt<'tcx>) -> EncodedMetadata {
EncodedMetadata { raw_data: result }
}
pub fn get_repr_options<'tcx>(tcx: TyCtxt<'tcx>, did: DefId) -> ReprOptions {
pub fn get_repr_options(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions {
let ty = tcx.type_of(did);
match ty.sty {
ty::Adt(ref def, _) => return def.repr,

View File

@ -3,7 +3,7 @@ use rustc::hir;
use rustc::middle::cstore::ForeignModule;
use rustc::ty::TyCtxt;
pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<ForeignModule> {
pub fn collect(tcx: TyCtxt<'_>) -> Vec<ForeignModule> {
let mut collector = Collector {
tcx,
modules: Vec::new(),

View File

@ -4,7 +4,7 @@ use rustc::ty::TyCtxt;
use rustc_target::spec::abi::Abi;
use syntax::symbol::sym;
pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<String> {
pub fn collect(tcx: TyCtxt<'_>) -> Vec<String> {
let mut collector = Collector {
args: Vec::new(),
};

View File

@ -11,7 +11,7 @@ use syntax::feature_gate::{self, GateIssue};
use syntax::symbol::{Symbol, sym};
use syntax::{span_err, struct_span_err};
pub fn collect<'tcx>(tcx: TyCtxt<'tcx>) -> Vec<NativeLibrary> {
pub fn collect(tcx: TyCtxt<'_>) -> Vec<NativeLibrary> {
let mut collector = Collector {
tcx,
libs: Vec::new(),

View File

@ -87,7 +87,7 @@ pub fn provide(providers: &mut Providers<'_>) {
};
}
fn mir_borrowck<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> BorrowCheckResult<'tcx> {
fn mir_borrowck(tcx: TyCtxt<'_>, def_id: DefId) -> BorrowCheckResult<'_> {
let input_body = tcx.mir_validated(def_id);
debug!("run query mir_borrowck: {}", tcx.def_path_str(def_id));
@ -275,7 +275,7 @@ fn do_mir_borrowck<'a, 'tcx>(
mbcx.analyze_results(&mut state); // entry point for DataflowResultsConsumer
// Convert any reservation warnings into lints.
let reservation_warnings = mem::replace(&mut mbcx.reservation_warnings, Default::default());
let reservation_warnings = mem::take(&mut mbcx.reservation_warnings);
for (_, (place, span, location, bk, borrow)) in reservation_warnings {
let mut initial_diag =
mbcx.report_conflicting_borrow(location, (&place, span), bk, &borrow);

View File

@ -234,10 +234,10 @@ impl<'s, D: ConstraintGraphDirecton> graph::WithNumNodes for RegionGraph<'s, D>
}
impl<'s, D: ConstraintGraphDirecton> graph::WithSuccessors for RegionGraph<'s, D> {
fn successors<'graph>(
&'graph self,
fn successors(
&self,
node: Self::Node,
) -> <Self as graph::GraphSuccessors<'graph>>::Iter {
) -> <Self as graph::GraphSuccessors<'_>>::Iter {
self.outgoing_regions(node)
}
}

Some files were not shown because too many files have changed in this diff.