Auto merge of #44678 - alexcrichton:rollup, r=alexcrichton
Rollup of 11 pull requests - Successful merges: #44364, #44466, #44537, #44548, #44640, #44651, #44657, #44661, #44668, #44671, #44675 - Failed merges:
commit 0701b37d97
@ -1,14 +1,13 @@
# `splice`

The tracking issue for this feature is: [#32310]
The tracking issue for this feature is: [#44643]

[#32310]: https://github.com/rust-lang/rust/issues/32310
[#44643]: https://github.com/rust-lang/rust/issues/44643

------------------------

The `splice()` method on `Vec` and `String` allows you to replace a range
of values in a vector or string with another range of values, and returns
the replaced values.
The `splice()` method on `String` allows you to replace a range
of values in a string with another range of values.

A simple example:

@ -20,4 +19,4 @@ let beta_offset = s.find('β').unwrap_or(s.len());
// Replace the range up until the β from the string
s.splice(..beta_offset, "Α is capital alpha; ");
assert_eq!(s, "Α is capital alpha; β is beta");
```
```
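As a reference point for the attribute change below (this rollup switches `Vec::splice` and the `Splice` iterator from `#[unstable(feature = "splice", ...)]` to `#[stable(feature = "vec_splice", since = "1.22.0")]`), here is a minimal usage sketch of `Vec::splice`; it is not part of the diff itself:

```rust
fn main() {
    let mut v = vec![1, 2, 3];
    let new = [7, 8];
    // Replace the first two elements; `splice` returns an iterator
    // over the elements that were removed.
    let removed: Vec<_> = v.splice(..2, new.iter().cloned()).collect();
    assert_eq!(v, [7, 8, 3]);
    assert_eq!(removed, [1, 2]);
}
```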
@ -1451,7 +1451,7 @@ impl String {
|
||||
/// s.splice(..beta_offset, "Α is capital alpha; ");
|
||||
/// assert_eq!(s, "Α is capital alpha; β is beta");
|
||||
/// ```
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "44643")]
|
||||
pub fn splice<R>(&mut self, range: R, replace_with: &str)
|
||||
where R: RangeArgument<usize>
|
||||
{
|
||||
|
@ -1943,7 +1943,6 @@ impl<T> Vec<T> {
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(splice)]
|
||||
/// let mut v = vec![1, 2, 3];
|
||||
/// let new = [7, 8];
|
||||
/// let u: Vec<_> = v.splice(..2, new.iter().cloned()).collect();
|
||||
@ -1951,7 +1950,7 @@ impl<T> Vec<T> {
|
||||
/// assert_eq!(u, &[1, 2]);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
|
||||
#[stable(feature = "vec_splice", since = "1.22.0")]
|
||||
pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<I::IntoIter>
|
||||
where R: RangeArgument<usize>, I: IntoIterator<Item=T>
|
||||
{
|
||||
@ -2554,13 +2553,13 @@ impl<'a, T> InPlace<T> for PlaceBack<'a, T> {
|
||||
/// [`splice()`]: struct.Vec.html#method.splice
|
||||
/// [`Vec`]: struct.Vec.html
|
||||
#[derive(Debug)]
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
|
||||
#[stable(feature = "vec_splice", since = "1.22.0")]
|
||||
pub struct Splice<'a, I: Iterator + 'a> {
|
||||
drain: Drain<'a, I::Item>,
|
||||
replace_with: I,
|
||||
}
|
||||
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
|
||||
#[stable(feature = "vec_splice", since = "1.22.0")]
|
||||
impl<'a, I: Iterator> Iterator for Splice<'a, I> {
|
||||
type Item = I::Item;
|
||||
|
||||
@ -2573,18 +2572,18 @@ impl<'a, I: Iterator> Iterator for Splice<'a, I> {
|
||||
}
|
||||
}
|
||||
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
|
||||
#[stable(feature = "vec_splice", since = "1.22.0")]
|
||||
impl<'a, I: Iterator> DoubleEndedIterator for Splice<'a, I> {
|
||||
fn next_back(&mut self) -> Option<Self::Item> {
|
||||
self.drain.next_back()
|
||||
}
|
||||
}
|
||||
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
|
||||
#[stable(feature = "vec_splice", since = "1.22.0")]
|
||||
impl<'a, I: Iterator> ExactSizeIterator for Splice<'a, I> {}
|
||||
|
||||
|
||||
#[unstable(feature = "splice", reason = "recently added", issue = "32310")]
|
||||
#[stable(feature = "vec_splice", since = "1.22.0")]
|
||||
impl<'a, I: Iterator> Drop for Splice<'a, I> {
|
||||
fn drop(&mut self) {
|
||||
// exhaust drain first
|
||||
|
@ -1381,38 +1381,7 @@ extern "rust-intrinsic" {
}

#[cfg(stage0)]
/// Computes the byte offset that needs to be applied to `ptr` in order to
/// make it aligned to `align`.
/// If it is not possible to align `ptr`, the implementation returns
/// `usize::max_value()`.
///
/// There are no guarantees whatsoever that offsetting the pointer will not
/// overflow or go beyond the allocation that `ptr` points into.
/// It is up to the caller to ensure that the returned offset is correct
/// in all terms other than alignment.
///
/// # Examples
///
/// Accessing adjacent `u8` as `u16`
///
/// ```
/// # #![feature(core_intrinsics)]
/// # fn foo(n: usize) {
/// # use std::intrinsics::align_offset;
/// # use std::mem::align_of;
/// # unsafe {
/// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
/// let ptr = &x[n] as *const u8;
/// let offset = align_offset(ptr as *const (), align_of::<u16>());
/// if offset < x.len() - n - 1 {
/// let u16_ptr = ptr.offset(offset as isize) as *const u16;
/// assert_ne!(*u16_ptr, 500);
/// } else {
/// // while the pointer can be aligned via `offset`, it would point
/// // outside the allocation
/// }
/// # } }
/// ```
/// remove me after the next release
pub unsafe fn align_offset(ptr: *const (), align: usize) -> usize {
let offset = ptr as usize % align;
if offset == 0 {
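The stage0 fallback above computes the offset with plain modular arithmetic; the hunk is truncated after the `if`. A standalone sketch of that computation (an illustrative assumption, not the actual intrinsic) for checking a few values:

```rust
// Sketch of the naive alignment-offset computation; assumes `align` is
// non-zero (alignments in Rust are always powers of two).
fn align_offset_naive(addr: usize, align: usize) -> usize {
    let rem = addr % align;
    if rem == 0 { 0 } else { align - rem }
}

fn main() {
    assert_eq!(align_offset_naive(0x1000, 8), 0); // already aligned
    assert_eq!(align_offset_naive(0x1003, 8), 5); // 5 more bytes needed
}
```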
@ -196,6 +196,23 @@ pub trait FromIterator<A>: Sized {
|
||||
/// assert_eq!(i as i32, n);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// It is common to use `IntoIterator` as a trait bound. This allows
|
||||
/// the input collection type to change, so long as it is still an
|
||||
/// iterator. Additional bounds can be specified by restricting on
|
||||
/// `Item`:
|
||||
///
|
||||
/// ```rust
|
||||
/// fn collect_as_strings<T>(collection: T) -> Vec<String>
|
||||
/// where T: IntoIterator,
|
||||
/// T::Item : std::fmt::Debug,
|
||||
/// {
|
||||
/// collection
|
||||
/// .into_iter()
|
||||
/// .map(|item| format!("{:?}", item))
|
||||
/// .collect()
|
||||
/// }
|
||||
/// ```
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub trait IntoIterator {
|
||||
/// The type of the elements being iterated over.
|
||||
|
@ -1064,7 +1064,43 @@ impl<T: ?Sized> *const T {
|
||||
copy_nonoverlapping(self, dest, count)
|
||||
}
|
||||
|
||||
|
||||
/// Computes the byte offset that needs to be applied in order to
|
||||
/// make the pointer aligned to `align`.
|
||||
/// If it is not possible to align the pointer, the implementation returns
|
||||
/// `usize::max_value()`.
|
||||
///
|
||||
/// There are no guarantees whatsoever that offsetting the pointer will not
|
||||
/// overflow or go beyond the allocation that the pointer points into.
|
||||
/// It is up to the caller to ensure that the returned offset is correct
|
||||
/// in all terms other than alignment.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Accessing adjacent `u8` as `u16`
|
||||
///
|
||||
/// ```
|
||||
/// # #![feature(align_offset)]
|
||||
/// # fn foo(n: usize) {
|
||||
/// # use std::mem::align_of;
|
||||
/// # unsafe {
|
||||
/// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
|
||||
/// let ptr = &x[n] as *const u8;
|
||||
/// let offset = ptr.align_offset(align_of::<u16>());
|
||||
/// if offset < x.len() - n - 1 {
|
||||
/// let u16_ptr = ptr.offset(offset as isize) as *const u16;
|
||||
/// assert_ne!(*u16_ptr, 500);
|
||||
/// } else {
|
||||
/// // while the pointer can be aligned via `offset`, it would point
|
||||
/// // outside the allocation
|
||||
/// }
|
||||
/// # } }
|
||||
/// ```
|
||||
#[unstable(feature = "align_offset", issue = "44488")]
|
||||
pub fn align_offset(self, align: usize) -> usize {
|
||||
unsafe {
|
||||
intrinsics::align_offset(self as *const _, align)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[lang = "mut_ptr"]
|
||||
@ -1284,6 +1320,43 @@ impl<T: ?Sized> *mut T {
|
||||
}
|
||||
}
|
||||
|
||||
/// Computes the byte offset that needs to be applied in order to
|
||||
/// make the pointer aligned to `align`.
|
||||
/// If it is not possible to align the pointer, the implementation returns
|
||||
/// `usize::max_value()`.
|
||||
///
|
||||
/// There are no guarantees whatsoever that offsetting the pointer will not
|
||||
/// overflow or go beyond the allocation that the pointer points into.
|
||||
/// It is up to the caller to ensure that the returned offset is correct
|
||||
/// in all terms other than alignment.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Accessing adjacent `u8` as `u16`
|
||||
///
|
||||
/// ```
|
||||
/// # #![feature(align_offset)]
|
||||
/// # fn foo(n: usize) {
|
||||
/// # use std::mem::align_of;
|
||||
/// # unsafe {
|
||||
/// let x = [5u8, 6u8, 7u8, 8u8, 9u8];
|
||||
/// let ptr = &x[n] as *const u8;
|
||||
/// let offset = ptr.align_offset(align_of::<u16>());
|
||||
/// if offset < x.len() - n - 1 {
|
||||
/// let u16_ptr = ptr.offset(offset as isize) as *const u16;
|
||||
/// assert_ne!(*u16_ptr, 500);
|
||||
/// } else {
|
||||
/// // while the pointer can be aligned via `offset`, it would point
|
||||
/// // outside the allocation
|
||||
/// }
|
||||
/// # } }
|
||||
/// ```
|
||||
#[unstable(feature = "align_offset", issue = "44488")]
|
||||
pub fn align_offset(self, align: usize) -> usize {
|
||||
unsafe {
|
||||
intrinsics::align_offset(self as *const _, align)
|
||||
}
|
||||
}
|
||||
|
||||
/// Calculates the offset from a pointer (convenience for `.offset(count as isize)`).
|
||||
///
|
||||
|
@ -23,7 +23,6 @@ use fmt;
|
||||
use iter::{Map, Cloned, FusedIterator};
|
||||
use slice::{self, SliceIndex};
|
||||
use mem;
|
||||
use intrinsics::align_offset;
|
||||
|
||||
pub mod pattern;
|
||||
|
||||
@ -404,7 +403,7 @@ unsafe fn from_raw_parts_mut<'a>(p: *mut u8, len: usize) -> &'a mut str {
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub unsafe fn from_utf8_unchecked(v: &[u8]) -> &str {
|
||||
mem::transmute(v)
|
||||
&*(v as *const [u8] as *const str)
|
||||
}
|
||||
|
||||
/// Converts a slice of bytes to a string slice without checking
|
||||
@ -429,7 +428,7 @@ pub unsafe fn from_utf8_unchecked(v: &[u8]) -> &str {
|
||||
#[inline]
|
||||
#[stable(feature = "str_mut_extras", since = "1.20.0")]
|
||||
pub unsafe fn from_utf8_unchecked_mut(v: &mut [u8]) -> &mut str {
|
||||
mem::transmute(v)
|
||||
&mut *(v as *mut [u8] as *mut str)
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
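The two hunks above swap `mem::transmute` for explicit raw-pointer casts without changing behavior. As an illustrative aside (not part of the diff), the same conversion is reachable through the public API, with `str::from_utf8` as the checked counterpart:

```rust
fn main() {
    let bytes: &[u8] = b"hello";

    // Checked conversion: validates the bytes and returns a Result.
    let s = std::str::from_utf8(bytes).expect("valid UTF-8");
    assert_eq!(s, "hello");

    // Unchecked conversion: the caller must guarantee valid UTF-8.
    let s2 = unsafe { std::str::from_utf8_unchecked(bytes) };
    assert_eq!(s2, "hello");
}
```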
@ -1515,7 +1514,7 @@ fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
|
||||
let ptr = v.as_ptr();
|
||||
let align = unsafe {
|
||||
// the offset is safe, because `index` is guaranteed inbounds
|
||||
align_offset(ptr.offset(index as isize) as *const (), usize_bytes)
|
||||
ptr.offset(index as isize).align_offset(usize_bytes)
|
||||
};
|
||||
if align == 0 {
|
||||
while index < blocks_end {
|
||||
@ -2447,12 +2446,12 @@ impl StrExt for str {
|
||||
|
||||
#[inline]
|
||||
fn as_bytes(&self) -> &[u8] {
|
||||
unsafe { mem::transmute(self) }
|
||||
unsafe { &*(self as *const str as *const [u8]) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn as_bytes_mut(&mut self) -> &mut [u8] {
|
||||
mem::transmute(self)
|
||||
&mut *(self as *mut str as *mut [u8])
|
||||
}
|
||||
|
||||
fn find<'a, P: Pattern<'a>>(&'a self, pat: P) -> Option<usize> {
|
||||
|
@ -603,12 +603,12 @@ trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {
|
||||
}
|
||||
|
||||
impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a, T> DepNodeParams<'a, 'gcx, 'tcx> for T
|
||||
where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + fmt::Debug
|
||||
where T: HashStable<StableHashingContext<'gcx>> + fmt::Debug
|
||||
{
|
||||
default const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
|
||||
|
||||
default fn to_fingerprint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Fingerprint {
|
||||
let mut hcx = StableHashingContext::new(tcx);
|
||||
let mut hcx = tcx.create_stable_hashing_context();
|
||||
let mut hasher = StableHasher::new();
|
||||
|
||||
self.hash_stable(&mut hcx, &mut hasher);
|
||||
@ -633,6 +633,18 @@ impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for (DefId,) {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for (DefIndex,) {
|
||||
const CAN_RECONSTRUCT_QUERY_KEY: bool = true;
|
||||
|
||||
fn to_fingerprint(&self, tcx: TyCtxt) -> Fingerprint {
|
||||
tcx.hir.definitions().def_path_hash(self.0).0
|
||||
}
|
||||
|
||||
fn to_debug_str(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> String {
|
||||
tcx.item_path_str(DefId::local(self.0))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> DepNodeParams<'a, 'gcx, 'tcx> for (DefId, DefId) {
|
||||
const CAN_RECONSTRUCT_QUERY_KEY: bool = false;
|
||||
|
||||
|
@ -9,11 +9,15 @@
|
||||
// except according to those terms.
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHashingContextProvider};
|
||||
use session::config::OutputType;
|
||||
use std::cell::{Ref, RefCell};
|
||||
use std::rc::Rc;
|
||||
use util::common::{ProfileQueriesMsg, profq_msg};
|
||||
|
||||
use ich::Fingerprint;
|
||||
|
||||
use super::dep_node::{DepNode, DepKind, WorkProductId};
|
||||
use super::query::DepGraphQuery;
|
||||
use super::raii;
|
||||
@ -71,10 +75,6 @@ impl DepGraph {
|
||||
self.data.as_ref().map(|data| raii::IgnoreTask::new(&data.edges))
|
||||
}
|
||||
|
||||
pub fn in_task<'graph>(&'graph self, key: DepNode) -> Option<raii::DepTask<'graph>> {
|
||||
self.data.as_ref().map(|data| raii::DepTask::new(&data.edges, key))
|
||||
}
|
||||
|
||||
pub fn with_ignore<OP,R>(&self, op: OP) -> R
|
||||
where OP: FnOnce() -> R
|
||||
{
|
||||
@ -109,24 +109,38 @@ impl DepGraph {
|
||||
/// `arg` parameter.
|
||||
///
|
||||
/// [README]: README.md
|
||||
pub fn with_task<C, A, R>(&self,
|
||||
key: DepNode,
|
||||
cx: C,
|
||||
arg: A,
|
||||
task: fn(C, A) -> R)
|
||||
-> (R, DepNodeIndex)
|
||||
where C: DepGraphSafe
|
||||
pub fn with_task<C, A, R, HCX>(&self,
|
||||
key: DepNode,
|
||||
cx: C,
|
||||
arg: A,
|
||||
task: fn(C, A) -> R)
|
||||
-> (R, DepNodeIndex)
|
||||
where C: DepGraphSafe + StableHashingContextProvider<ContextType=HCX>,
|
||||
R: HashStable<HCX>,
|
||||
{
|
||||
if let Some(ref data) = self.data {
|
||||
data.edges.borrow_mut().push_task(key);
|
||||
if cfg!(debug_assertions) {
|
||||
profq_msg(ProfileQueriesMsg::TaskBegin(key.clone()))
|
||||
};
|
||||
|
||||
// In incremental mode, hash the result of the task. We don't
|
||||
// do anything with the hash yet, but we are computing it
|
||||
// anyway so that
|
||||
// - we make sure that the infrastructure works and
|
||||
// - we can get an idea of the runtime cost.
|
||||
let mut hcx = cx.create_stable_hashing_context();
|
||||
|
||||
let result = task(cx, arg);
|
||||
if cfg!(debug_assertions) {
|
||||
profq_msg(ProfileQueriesMsg::TaskEnd)
|
||||
};
|
||||
let dep_node_index = data.edges.borrow_mut().pop_task(key);
|
||||
|
||||
let mut stable_hasher = StableHasher::new();
|
||||
result.hash_stable(&mut hcx, &mut stable_hasher);
|
||||
let _: Fingerprint = stable_hasher.finish();
|
||||
|
||||
(result, dep_node_index)
|
||||
} else {
|
||||
(task(cx, arg), DepNodeIndex::INVALID)
|
||||
|
@ -58,6 +58,13 @@ impl<'a, A> DepGraphSafe for &'a A
|
||||
{
|
||||
}
|
||||
|
||||
/// Mut ref to dep-graph-safe stuff should still be dep-graph-safe.
|
||||
impl<'a, A> DepGraphSafe for &'a mut A
|
||||
where A: DepGraphSafe,
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
/// No data here! :)
|
||||
impl DepGraphSafe for () {
|
||||
}
|
||||
|
@ -247,7 +247,7 @@ pub struct Map<'hir> {
|
||||
/// plain old integers.
|
||||
map: Vec<MapEntry<'hir>>,
|
||||
|
||||
definitions: Definitions,
|
||||
definitions: &'hir Definitions,
|
||||
|
||||
/// Bodies inlined from other crates are cached here.
|
||||
inlined_bodies: RefCell<DefIdMap<&'hir Body>>,
|
||||
@ -304,8 +304,8 @@ impl<'hir> Map<'hir> {
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn definitions(&self) -> &Definitions {
|
||||
&self.definitions
|
||||
pub fn definitions(&self) -> &'hir Definitions {
|
||||
self.definitions
|
||||
}
|
||||
|
||||
pub fn def_key(&self, def_id: DefId) -> DefKey {
|
||||
@ -1013,7 +1013,7 @@ impl Named for TraitItem { fn name(&self) -> Name { self.name } }
|
||||
impl Named for ImplItem { fn name(&self) -> Name { self.name } }
|
||||
|
||||
pub fn map_crate<'hir>(forest: &'hir mut Forest,
|
||||
definitions: Definitions)
|
||||
definitions: &'hir Definitions)
|
||||
-> Map<'hir> {
|
||||
let map = {
|
||||
let mut collector = NodeCollector::root(&forest.krate,
|
||||
|
@ -11,7 +11,6 @@
|
||||
use std::rc::Rc;
|
||||
use syntax::codemap::CodeMap;
|
||||
use syntax_pos::{BytePos, FileMap};
|
||||
use ty::TyCtxt;
|
||||
|
||||
#[derive(Clone)]
|
||||
struct CacheEntry {
|
||||
@ -23,15 +22,14 @@ struct CacheEntry {
|
||||
file_index: usize,
|
||||
}
|
||||
|
||||
pub struct CachingCodemapView<'tcx> {
|
||||
codemap: &'tcx CodeMap,
|
||||
pub struct CachingCodemapView<'cm> {
|
||||
codemap: &'cm CodeMap,
|
||||
line_cache: [CacheEntry; 3],
|
||||
time_stamp: usize,
|
||||
}
|
||||
|
||||
impl<'gcx> CachingCodemapView<'gcx> {
|
||||
pub fn new<'a, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> CachingCodemapView<'gcx> {
|
||||
let codemap = tcx.sess.codemap();
|
||||
impl<'cm> CachingCodemapView<'cm> {
|
||||
pub fn new(codemap: &'cm CodeMap) -> CachingCodemapView<'cm> {
|
||||
let files = codemap.files();
|
||||
let first_file = files[0].clone();
|
||||
let entry = CacheEntry {
|
||||
|
@ -9,72 +9,116 @@
|
||||
// except according to those terms.
|
||||
|
||||
use hir;
|
||||
use hir::def_id::DefId;
|
||||
use hir::def_id::{DefId, DefIndex};
|
||||
use hir::map::DefPathHash;
|
||||
use hir::map::definitions::Definitions;
|
||||
use ich::{self, CachingCodemapView};
|
||||
use middle::cstore::CrateStore;
|
||||
use session::config::DebugInfoLevel::NoDebugInfo;
|
||||
use ty::TyCtxt;
|
||||
use util::nodemap::{NodeMap, ItemLocalMap};
|
||||
use ty::{TyCtxt, fast_reject};
|
||||
use session::Session;
|
||||
|
||||
use std::cmp::Ord;
|
||||
use std::hash as std_hash;
|
||||
use std::collections::{HashMap, HashSet, BTreeMap};
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::attr;
|
||||
use syntax::codemap::CodeMap;
|
||||
use syntax::ext::hygiene::SyntaxContext;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHashingContextProvider,
|
||||
StableHasher, StableHasherResult,
|
||||
ToStableHashKey};
|
||||
use rustc_data_structures::accumulate_vec::AccumulateVec;
|
||||
use rustc_data_structures::fx::FxHashSet;
|
||||
|
||||
thread_local!(static IGNORED_ATTR_NAMES: RefCell<FxHashSet<Symbol>> =
|
||||
RefCell::new(FxHashSet()));
|
||||
|
||||
/// This is the context state available during incr. comp. hashing. It contains
|
||||
/// enough information to transform DefIds and HirIds into stable DefPaths (i.e.
|
||||
/// a reference to the TyCtxt) and it holds a few caches for speeding up various
|
||||
/// things (e.g. each DefId/DefPath is only hashed once).
|
||||
pub struct StableHashingContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
|
||||
tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
codemap: CachingCodemapView<'gcx>,
|
||||
pub struct StableHashingContext<'gcx> {
|
||||
sess: &'gcx Session,
|
||||
definitions: &'gcx Definitions,
|
||||
cstore: &'gcx CrateStore,
|
||||
body_resolver: BodyResolver<'gcx>,
|
||||
hash_spans: bool,
|
||||
hash_bodies: bool,
|
||||
overflow_checks_enabled: bool,
|
||||
node_id_hashing_mode: NodeIdHashingMode,
|
||||
// A sorted array of symbol keys for fast lookup.
|
||||
ignored_attr_names: Vec<Symbol>,
|
||||
|
||||
// Very often, we are hashing something that does not need the
|
||||
// CachingCodemapView, so we initialize it lazily.
|
||||
raw_codemap: &'gcx CodeMap,
|
||||
caching_codemap: Option<CachingCodemapView<'gcx>>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Copy)]
|
||||
pub enum NodeIdHashingMode {
|
||||
Ignore,
|
||||
HashDefPath,
|
||||
HashTraitsInScope,
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
|
||||
/// The BodyResolver allows to map a BodyId to the corresponding hir::Body.
|
||||
/// We could also just store a plain reference to the hir::Crate but we want
|
||||
/// to avoid that the crate is used to get untracked access to all of the HIR.
|
||||
#[derive(Clone, Copy)]
|
||||
struct BodyResolver<'gcx>(&'gcx hir::Crate);
|
||||
|
||||
pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
|
||||
let hash_spans_initial = tcx.sess.opts.debuginfo != NoDebugInfo;
|
||||
let check_overflow_initial = tcx.sess.overflow_checks();
|
||||
impl<'gcx> BodyResolver<'gcx> {
|
||||
// Return a reference to the hir::Body with the given BodyId.
|
||||
// DOES NOT DO ANY TRACKING, use carefully.
|
||||
fn body(self, id: hir::BodyId) -> &'gcx hir::Body {
|
||||
self.0.body(id)
|
||||
}
|
||||
}
|
||||
|
||||
let mut ignored_attr_names: Vec<_> = ich::IGNORED_ATTRIBUTES
|
||||
.iter()
|
||||
.map(|&s| Symbol::intern(s))
|
||||
.collect();
|
||||
impl<'gcx> StableHashingContext<'gcx> {
|
||||
// The `krate` here is only used for mapping BodyIds to Bodies.
|
||||
// Don't use it for anything else or you'll run the risk of
|
||||
// leaking data out of the tracking system.
|
||||
pub fn new(sess: &'gcx Session,
|
||||
krate: &'gcx hir::Crate,
|
||||
definitions: &'gcx Definitions,
|
||||
cstore: &'gcx CrateStore)
|
||||
-> Self {
|
||||
let hash_spans_initial = sess.opts.debuginfo != NoDebugInfo;
|
||||
let check_overflow_initial = sess.overflow_checks();
|
||||
|
||||
ignored_attr_names.sort();
|
||||
debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0);
|
||||
IGNORED_ATTR_NAMES.with(|names| {
|
||||
let mut names = names.borrow_mut();
|
||||
if names.is_empty() {
|
||||
names.extend(ich::IGNORED_ATTRIBUTES.iter()
|
||||
.map(|&s| Symbol::intern(s)));
|
||||
}
|
||||
});
|
||||
|
||||
StableHashingContext {
|
||||
tcx,
|
||||
codemap: CachingCodemapView::new(tcx),
|
||||
sess,
|
||||
body_resolver: BodyResolver(krate),
|
||||
definitions,
|
||||
cstore,
|
||||
caching_codemap: None,
|
||||
raw_codemap: sess.codemap(),
|
||||
hash_spans: hash_spans_initial,
|
||||
hash_bodies: true,
|
||||
overflow_checks_enabled: check_overflow_initial,
|
||||
node_id_hashing_mode: NodeIdHashingMode::HashDefPath,
|
||||
ignored_attr_names,
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn sess(&self) -> &'gcx Session {
|
||||
self.sess
|
||||
}
|
||||
|
||||
pub fn force_span_hashing(mut self) -> Self {
|
||||
self.hash_spans = true;
|
||||
self
|
||||
@ -111,13 +155,17 @@ impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
self.tcx
|
||||
pub fn def_path_hash(&self, def_id: DefId) -> DefPathHash {
|
||||
if def_id.is_local() {
|
||||
self.definitions.def_path_hash(def_id.index)
|
||||
} else {
|
||||
self.cstore.def_path_hash(def_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn def_path_hash(&mut self, def_id: DefId) -> DefPathHash {
|
||||
self.tcx.def_path_hash(def_id)
|
||||
pub fn local_def_path_hash(&self, def_index: DefIndex) -> DefPathHash {
|
||||
self.definitions.def_path_hash(def_index)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@ -132,12 +180,22 @@ impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
|
||||
|
||||
#[inline]
|
||||
pub fn codemap(&mut self) -> &mut CachingCodemapView<'gcx> {
|
||||
&mut self.codemap
|
||||
match self.caching_codemap {
|
||||
Some(ref mut cm) => {
|
||||
cm
|
||||
}
|
||||
ref mut none => {
|
||||
*none = Some(CachingCodemapView::new(self.raw_codemap));
|
||||
none.as_mut().unwrap()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn is_ignored_attr(&self, name: Symbol) -> bool {
|
||||
self.ignored_attr_names.binary_search(&name).is_ok()
|
||||
IGNORED_ATTR_NAMES.with(|names| {
|
||||
names.borrow().contains(&name)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn hash_hir_item_like<F: FnOnce(&mut Self)>(&mut self,
|
||||
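The new `codemap()` accessor above initializes `caching_codemap` lazily through a `match`. A minimal sketch of the same lazy-initialization idiom in ordinary code (hypothetical types, shown only to illustrate the pattern) uses `Option::get_or_insert_with`:

```rust
// Hypothetical stand-in for the lazily built CachingCodemapView.
struct Cache {
    lines: Vec<usize>,
}

struct View {
    cached: Option<Cache>,
}

impl View {
    // Builds the cache on first access and reuses it afterwards.
    fn cache(&mut self) -> &mut Cache {
        self.cached.get_or_insert_with(|| Cache { lines: vec![0, 10, 20] })
    }
}

fn main() {
    let mut view = View { cached: None };
    assert_eq!(view.cache().lines.len(), 3);
    view.cache().lines.push(30);
    assert_eq!(view.cache().lines.len(), 4);
}
```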
@ -194,44 +252,84 @@ impl<'a, 'gcx, 'tcx> StableHashingContext<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'lcx> StableHashingContextProvider for TyCtxt<'a, 'gcx, 'lcx> {
|
||||
type ContextType = StableHashingContext<'gcx>;
|
||||
fn create_stable_hashing_context(&self) -> Self::ContextType {
|
||||
(*self).create_stable_hashing_context()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::NodeId {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::BodyId {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
if hcx.hash_bodies() {
|
||||
hcx.body_resolver.body(*self).hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::HirId {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir_id = hcx.tcx.hir.node_to_hir_id(*self);
|
||||
match hcx.node_id_hashing_mode {
|
||||
NodeIdHashingMode::Ignore => {
|
||||
// Most NodeIds in the HIR can be ignored, but if there is a
|
||||
// corresponding entry in the `trait_map` we need to hash that.
|
||||
// Make sure we don't ignore too much by checking that there is
|
||||
// no entry in a debug_assert!().
|
||||
debug_assert!(hcx.tcx.in_scope_traits(hir_id).is_none());
|
||||
// Don't do anything.
|
||||
}
|
||||
NodeIdHashingMode::HashDefPath => {
|
||||
hir_id.hash_stable(hcx, hasher);
|
||||
}
|
||||
NodeIdHashingMode::HashTraitsInScope => {
|
||||
if let Some(traits) = hcx.tcx.in_scope_traits(hir_id) {
|
||||
// The ordering of the candidates is not fixed. So we hash
|
||||
// the def-ids and then sort them and hash the collection.
|
||||
let mut candidates: AccumulateVec<[_; 8]> =
|
||||
traits.iter()
|
||||
.map(|&hir::TraitCandidate { def_id, import_id: _ }| {
|
||||
hcx.def_path_hash(def_id)
|
||||
})
|
||||
.collect();
|
||||
if traits.len() > 1 {
|
||||
candidates.sort();
|
||||
}
|
||||
candidates.hash_stable(hcx, hasher);
|
||||
}
|
||||
let hir::HirId {
|
||||
owner,
|
||||
local_id,
|
||||
} = *self;
|
||||
|
||||
hcx.local_def_path_hash(owner).hash_stable(hcx, hasher);
|
||||
local_id.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Span {
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for hir::HirId {
|
||||
type KeyType = (DefPathHash, hir::ItemLocalId);
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self,
|
||||
hcx: &StableHashingContext<'gcx>)
|
||||
-> (DefPathHash, hir::ItemLocalId) {
|
||||
let def_path_hash = hcx.local_def_path_hash(self.owner);
|
||||
(def_path_hash, self.local_id)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::NodeId {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
match hcx.node_id_hashing_mode {
|
||||
NodeIdHashingMode::Ignore => {
|
||||
// Don't do anything.
|
||||
}
|
||||
NodeIdHashingMode::HashDefPath => {
|
||||
hcx.definitions.node_to_hir_id(*self).hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for ast::NodeId {
|
||||
type KeyType = (DefPathHash, hir::ItemLocalId);
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self,
|
||||
hcx: &StableHashingContext<'gcx>)
|
||||
-> (DefPathHash, hir::ItemLocalId) {
|
||||
hcx.definitions.node_to_hir_id(*self).to_stable_hash_key(hcx)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for Span {
|
||||
|
||||
// Hash a span in a stable way. We can't directly hash the span's BytePos
|
||||
// fields (that would be similar to hashing pointers, since those are just
|
||||
@ -243,7 +341,7 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Span {
|
||||
// Also, hashing filenames is expensive so we avoid doing it twice when the
|
||||
// span starts and ends in the same file, which is almost always the case.
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use syntax_pos::Pos;
|
||||
|
||||
@ -306,90 +404,47 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Span {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash_stable_hashmap<'a, 'gcx, 'tcx, K, V, R, SK, F, W>(
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
pub fn hash_stable_trait_impls<'gcx, W, R>(
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
map: &HashMap<K, V, R>,
|
||||
extract_stable_key: F)
|
||||
where K: Eq + std_hash::Hash,
|
||||
V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
blanket_impls: &Vec<DefId>,
|
||||
non_blanket_impls: &HashMap<fast_reject::SimplifiedType, Vec<DefId>, R>)
|
||||
where W: StableHasherResult,
|
||||
R: std_hash::BuildHasher,
|
||||
SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
|
||||
F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
|
||||
W: StableHasherResult,
|
||||
{
|
||||
let mut keys: Vec<_> = map.keys()
|
||||
.map(|k| (extract_stable_key(hcx, k), k))
|
||||
.collect();
|
||||
keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
|
||||
keys.len().hash_stable(hcx, hasher);
|
||||
for (stable_key, key) in keys {
|
||||
stable_key.hash_stable(hcx, hasher);
|
||||
map[key].hash_stable(hcx, hasher);
|
||||
{
|
||||
let mut blanket_impls: AccumulateVec<[_; 8]> = blanket_impls
|
||||
.iter()
|
||||
.map(|&def_id| hcx.def_path_hash(def_id))
|
||||
.collect();
|
||||
|
||||
if blanket_impls.len() > 1 {
|
||||
blanket_impls.sort_unstable();
|
||||
}
|
||||
|
||||
blanket_impls.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
{
|
||||
let mut keys: AccumulateVec<[_; 8]> =
|
||||
non_blanket_impls.keys()
|
||||
.map(|k| (k, k.map_def(|d| hcx.def_path_hash(d))))
|
||||
.collect();
|
||||
keys.sort_unstable_by(|&(_, ref k1), &(_, ref k2)| k1.cmp(k2));
|
||||
keys.len().hash_stable(hcx, hasher);
|
||||
for (key, ref stable_key) in keys {
|
||||
stable_key.hash_stable(hcx, hasher);
|
||||
let mut impls : AccumulateVec<[_; 8]> = non_blanket_impls[key]
|
||||
.iter()
|
||||
.map(|&impl_id| hcx.def_path_hash(impl_id))
|
||||
.collect();
|
||||
|
||||
if impls.len() > 1 {
|
||||
impls.sort_unstable();
|
||||
}
|
||||
|
||||
impls.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash_stable_hashset<'a, 'tcx, 'gcx, K, R, SK, F, W>(
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
set: &HashSet<K, R>,
|
||||
extract_stable_key: F)
|
||||
where K: Eq + std_hash::Hash,
|
||||
R: std_hash::BuildHasher,
|
||||
SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
|
||||
F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
|
||||
W: StableHasherResult,
|
||||
{
|
||||
let mut keys: Vec<_> = set.iter()
|
||||
.map(|k| extract_stable_key(hcx, k))
|
||||
.collect();
|
||||
keys.sort_unstable();
|
||||
keys.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
pub fn hash_stable_nodemap<'a, 'tcx, 'gcx, V, W>(
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
map: &NodeMap<V>)
|
||||
where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
W: StableHasherResult,
|
||||
{
|
||||
hash_stable_hashmap(hcx, hasher, map, |hcx, node_id| {
|
||||
hcx.tcx.hir.definitions().node_to_hir_id(*node_id).local_id
|
||||
});
|
||||
}
|
||||
|
||||
pub fn hash_stable_itemlocalmap<'a, 'tcx, 'gcx, V, W>(
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
map: &ItemLocalMap<V>)
|
||||
where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
W: StableHasherResult,
|
||||
{
|
||||
hash_stable_hashmap(hcx, hasher, map, |_, local_id| {
|
||||
*local_id
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
pub fn hash_stable_btreemap<'a, 'tcx, 'gcx, K, V, SK, F, W>(
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
map: &BTreeMap<K, V>,
|
||||
extract_stable_key: F)
|
||||
where K: Eq + Ord,
|
||||
V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
SK: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> + Ord + Clone,
|
||||
F: Fn(&mut StableHashingContext<'a, 'gcx, 'tcx>, &K) -> SK,
|
||||
W: StableHasherResult,
|
||||
{
|
||||
let mut keys: Vec<_> = map.keys()
|
||||
.map(|k| (extract_stable_key(hcx, k), k))
|
||||
.collect();
|
||||
keys.sort_unstable_by_key(|&(ref stable_key, _)| stable_key.clone());
|
||||
keys.len().hash_stable(hcx, hasher);
|
||||
for (stable_key, key) in keys {
|
||||
stable_key.hash_stable(hcx, hasher);
|
||||
map[key].hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
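`hash_stable_trait_impls` above, like the trait-candidate hashing earlier in this file, maps each element of an unordered collection to a stable key, sorts the keys, and only then feeds them to the hasher. A self-contained sketch of that pattern (illustrative only, using the standard hasher rather than `StableHasher`):

```rust
use std::collections::HashSet;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash a set independently of its iteration order by sorting a copy
// of the elements before hashing them.
fn order_independent_hash(set: &HashSet<&str>) -> u64 {
    let mut keys: Vec<_> = set.iter().collect();
    keys.sort();
    let mut hasher = DefaultHasher::new();
    keys.len().hash(&mut hasher);
    for key in keys {
        key.hash(&mut hasher);
    }
    hasher.finish()
}

fn main() {
    let a: HashSet<&str> = ["x", "y", "z"].iter().cloned().collect();
    let b: HashSet<&str> = ["z", "y", "x"].iter().cloned().collect();
    // Same contents, possibly different iteration order, same hash.
    assert_eq!(order_independent_hash(&a), order_independent_hash(&b));
}
```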
@ -11,6 +11,8 @@
|
||||
//! This module contains `HashStable` implementations for various data types
|
||||
//! from rustc::middle::cstore in no particular order.
|
||||
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
|
||||
|
||||
use middle;
|
||||
|
||||
impl_stable_hash_for!(enum middle::cstore::DepKind {
|
||||
@ -38,3 +40,42 @@ impl_stable_hash_for!(enum middle::cstore::LinkagePreference {
|
||||
RequireDynamic,
|
||||
RequireStatic
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct middle::cstore::ExternCrate {
|
||||
def_id,
|
||||
span,
|
||||
direct,
|
||||
path_len
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct middle::cstore::CrateSource {
|
||||
dylib,
|
||||
rlib,
|
||||
rmeta
|
||||
});
|
||||
|
||||
impl<HCX> HashStable<HCX> for middle::cstore::ExternBodyNestedBodies {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let middle::cstore::ExternBodyNestedBodies {
|
||||
nested_bodies: _,
|
||||
fingerprint,
|
||||
} = *self;
|
||||
|
||||
fingerprint.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, HCX> HashStable<HCX> for middle::cstore::ExternConstBody<'a> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let middle::cstore::ExternConstBody {
|
||||
body: _,
|
||||
fingerprint,
|
||||
} = *self;
|
||||
|
||||
fingerprint.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
@ -12,45 +12,36 @@
|
||||
//! types in no particular order.
|
||||
|
||||
use hir;
|
||||
use hir::map::DefPathHash;
|
||||
use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
|
||||
use ich::{StableHashingContext, NodeIdHashingMode};
|
||||
use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
|
||||
StableHasher, StableHasherResult};
|
||||
use std::mem;
|
||||
|
||||
use syntax::ast;
|
||||
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for DefId {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for DefId {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
hcx.def_path_hash(*self).hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for DefId {
|
||||
type KeyType = DefPathHash;
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::HirId {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::HirId {
|
||||
owner,
|
||||
local_id,
|
||||
} = *self;
|
||||
|
||||
hcx.def_path_hash(DefId::local(owner)).hash_stable(hcx, hasher);
|
||||
local_id.hash_stable(hcx, hasher);
|
||||
fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash {
|
||||
hcx.def_path_hash(*self)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for CrateNum {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for CrateNum {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
hcx.def_path_hash(DefId {
|
||||
krate: *self,
|
||||
@ -59,8 +50,30 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for CrateN
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for CrateNum {
|
||||
type KeyType = DefPathHash;
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash {
|
||||
let def_id = DefId { krate: *self, index: CRATE_DEF_INDEX };
|
||||
def_id.to_stable_hash_key(hcx)
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(tuple_struct hir::ItemLocalId { index });
|
||||
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>>
|
||||
for hir::ItemLocalId {
|
||||
type KeyType = hir::ItemLocalId;
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self,
|
||||
_: &StableHashingContext<'gcx>)
|
||||
-> hir::ItemLocalId {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
// The following implementations of HashStable for ItemId, TraitItemId, and
|
||||
// ImplItemId deserve special attention. Normally we do not hash NodeIds within
|
||||
// the HIR, since they just signify a HIR nodes own path. But ItemId et al
|
||||
@ -68,9 +81,9 @@ impl_stable_hash_for!(tuple_struct hir::ItemLocalId { index });
|
||||
// want to pick up on a reference changing its target, so we hash the NodeIds
|
||||
// in "DefPath Mode".
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::ItemId {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ItemId {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::ItemId {
|
||||
id
|
||||
@ -82,9 +95,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::I
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::TraitItemId {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitItemId {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::TraitItemId {
|
||||
node_id
|
||||
@ -96,9 +109,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::T
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::ImplItemId {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ImplItemId {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::ImplItemId {
|
||||
node_id
|
||||
@ -218,40 +231,17 @@ impl_stable_hash_for!(struct hir::TypeBinding {
|
||||
span
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Ty {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Ty {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let node_id_hashing_mode = match self.node {
|
||||
hir::TySlice(..) |
|
||||
hir::TyArray(..) |
|
||||
hir::TyPtr(..) |
|
||||
hir::TyRptr(..) |
|
||||
hir::TyBareFn(..) |
|
||||
hir::TyNever |
|
||||
hir::TyTup(..) |
|
||||
hir::TyTraitObject(..) |
|
||||
hir::TyImplTrait(..) |
|
||||
hir::TyTypeof(..) |
|
||||
hir::TyErr |
|
||||
hir::TyInfer => {
|
||||
NodeIdHashingMode::Ignore
|
||||
}
|
||||
hir::TyPath(..) => {
|
||||
NodeIdHashingMode::HashTraitsInScope
|
||||
}
|
||||
};
|
||||
|
||||
hcx.while_hashing_hir_bodies(true, |hcx| {
|
||||
let hir::Ty {
|
||||
id,
|
||||
id: _,
|
||||
ref node,
|
||||
ref span,
|
||||
} = *self;
|
||||
|
||||
hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
|
||||
id.hash_stable(hcx, hasher);
|
||||
});
|
||||
node.hash_stable(hcx, hasher);
|
||||
span.hash_stable(hcx, hasher);
|
||||
})
|
||||
@ -302,19 +292,17 @@ impl_stable_hash_for!(enum hir::FunctionRetTy {
|
||||
Return(t)
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::TraitRef {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitRef {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::TraitRef {
|
||||
ref path,
|
||||
ref_id,
|
||||
// Don't hash the ref_id. It is tracked via the thing it is used to access
|
||||
ref_id: _,
|
||||
} = *self;
|
||||
|
||||
path.hash_stable(hcx, hasher);
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashTraitsInScope, |hcx| {
|
||||
ref_id.hash_stable(hcx, hasher);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -341,14 +329,14 @@ impl_stable_hash_for!(struct hir::MacroDef {
|
||||
});
|
||||
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Block {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Block {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::Block {
|
||||
ref stmts,
|
||||
ref expr,
|
||||
id,
|
||||
id: _,
|
||||
hir_id: _,
|
||||
rules,
|
||||
span,
|
||||
@ -383,45 +371,24 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::B
|
||||
}
|
||||
|
||||
expr.hash_stable(hcx, hasher);
|
||||
id.hash_stable(hcx, hasher);
|
||||
rules.hash_stable(hcx, hasher);
|
||||
span.hash_stable(hcx, hasher);
|
||||
targeted_by_break.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Pat {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Pat {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let node_id_hashing_mode = match self.node {
|
||||
hir::PatKind::Wild |
|
||||
hir::PatKind::Binding(..) |
|
||||
hir::PatKind::Tuple(..) |
|
||||
hir::PatKind::Box(..) |
|
||||
hir::PatKind::Ref(..) |
|
||||
hir::PatKind::Lit(..) |
|
||||
hir::PatKind::Range(..) |
|
||||
hir::PatKind::Slice(..) => {
|
||||
NodeIdHashingMode::Ignore
|
||||
}
|
||||
hir::PatKind::Path(..) |
|
||||
hir::PatKind::Struct(..) |
|
||||
hir::PatKind::TupleStruct(..) => {
|
||||
NodeIdHashingMode::HashTraitsInScope
|
||||
}
|
||||
};
|
||||
|
||||
let hir::Pat {
|
||||
id,
|
||||
id: _,
|
||||
hir_id: _,
|
||||
ref node,
|
||||
ref span
|
||||
} = *self;
|
||||
|
||||
hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
|
||||
id.hash_stable(hcx, hasher);
|
||||
});
|
||||
|
||||
node.hash_stable(hcx, hasher);
|
||||
span.hash_stable(hcx, hasher);
|
||||
}
|
||||
@ -537,20 +504,20 @@ impl_stable_hash_for!(enum hir::UnsafeSource {
|
||||
UserProvided
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Expr {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Expr {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
hcx.while_hashing_hir_bodies(true, |hcx| {
|
||||
let hir::Expr {
|
||||
id,
|
||||
id: _,
|
||||
hir_id: _,
|
||||
ref span,
|
||||
ref node,
|
||||
ref attrs
|
||||
} = *self;
|
||||
|
||||
let (spans_always_on, node_id_hashing_mode) = match *node {
|
||||
let spans_always_on = match *node {
|
||||
hir::ExprBox(..) |
|
||||
hir::ExprArray(..) |
|
||||
hir::ExprCall(..) |
|
||||
@ -569,41 +536,33 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::E
|
||||
hir::ExprBreak(..) |
|
||||
hir::ExprAgain(..) |
|
||||
hir::ExprRet(..) |
|
||||
hir::ExprYield(..) |
|
||||
hir::ExprYield(..) |
|
||||
hir::ExprInlineAsm(..) |
|
||||
hir::ExprRepeat(..) |
|
||||
hir::ExprTup(..) => {
|
||||
// For these we only hash the span when debuginfo is on.
|
||||
(false, NodeIdHashingMode::Ignore)
|
||||
}
|
||||
// For the following, spans might be significant because of
|
||||
// panic messages indicating the source location.
|
||||
hir::ExprBinary(op, ..) => {
|
||||
(hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::Ignore)
|
||||
}
|
||||
hir::ExprUnary(op, _) => {
|
||||
(hcx.unop_can_panic_at_runtime(op), NodeIdHashingMode::Ignore)
|
||||
}
|
||||
hir::ExprAssignOp(op, ..) => {
|
||||
(hcx.binop_can_panic_at_runtime(op.node), NodeIdHashingMode::Ignore)
|
||||
}
|
||||
hir::ExprIndex(..) => {
|
||||
(true, NodeIdHashingMode::Ignore)
|
||||
}
|
||||
// For these we don't care about the span, but want to hash the
|
||||
// trait in scope
|
||||
hir::ExprTup(..) |
|
||||
hir::ExprMethodCall(..) |
|
||||
hir::ExprPath(..) |
|
||||
hir::ExprStruct(..) |
|
||||
hir::ExprField(..) => {
|
||||
(false, NodeIdHashingMode::HashTraitsInScope)
|
||||
// For these we only hash the span when debuginfo is on.
|
||||
false
|
||||
}
|
||||
// For the following, spans might be significant because of
|
||||
// panic messages indicating the source location.
|
||||
hir::ExprBinary(op, ..) => {
|
||||
hcx.binop_can_panic_at_runtime(op.node)
|
||||
}
|
||||
hir::ExprUnary(op, _) => {
|
||||
hcx.unop_can_panic_at_runtime(op)
|
||||
}
|
||||
hir::ExprAssignOp(op, ..) => {
|
||||
hcx.binop_can_panic_at_runtime(op.node)
|
||||
}
|
||||
hir::ExprIndex(..) => {
|
||||
true
|
||||
}
|
||||
};
|
||||
|
||||
hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
|
||||
id.hash_stable(hcx, hasher);
|
||||
});
|
||||
|
||||
if spans_always_on {
|
||||
hcx.while_hashing_spans(true, |hcx| {
|
||||
span.hash_stable(hcx, hasher);
|
||||
@ -663,9 +622,9 @@ impl_stable_hash_for!(enum hir::LoopSource {
|
||||
ForLoop
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::MatchSource {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::MatchSource {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use hir::MatchSource;
|
||||
|
||||
@ -714,9 +673,9 @@ impl_stable_hash_for!(enum hir::ScopeTarget {
|
||||
Loop(loop_id_result)
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::Ident {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Ident {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ast::Ident {
|
||||
ref name,
|
||||
@ -727,9 +686,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::I
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::TraitItem {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::TraitItem {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::TraitItem {
|
||||
id,
|
||||
@ -761,9 +720,9 @@ impl_stable_hash_for!(enum hir::TraitItemKind {
|
||||
Type(bounds, rhs)
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::ImplItem {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::ImplItem {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::ImplItem {
|
||||
id,
|
||||
@ -794,9 +753,9 @@ impl_stable_hash_for!(enum hir::ImplItemKind {
|
||||
Type(t)
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Visibility {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Visibility {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -806,7 +765,7 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::V
|
||||
// No fields to hash.
|
||||
}
|
||||
hir::Visibility::Restricted { ref path, id } => {
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashTraitsInScope, |hcx| {
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
|
||||
id.hash_stable(hcx, hasher);
|
||||
});
|
||||
path.hash_stable(hcx, hasher);
|
||||
@ -815,9 +774,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::V
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Defaultness {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Defaultness {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -836,9 +795,9 @@ impl_stable_hash_for!(enum hir::ImplPolarity {
|
||||
Negative
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Mod {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Mod {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::Mod {
|
||||
inner,
|
||||
@ -891,20 +850,17 @@ impl_stable_hash_for!(enum hir::VariantData {
|
||||
Unit(id)
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::Item {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Item {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let (node_id_hashing_mode, hash_spans) = match self.node {
|
||||
let hash_spans = match self.node {
|
||||
hir::ItemStatic(..) |
|
||||
hir::ItemConst(..) |
|
||||
hir::ItemFn(..) => {
|
||||
(NodeIdHashingMode::Ignore, hcx.hash_spans())
|
||||
hcx.hash_spans()
|
||||
}
|
||||
hir::ItemUse(..) => {
|
||||
(NodeIdHashingMode::HashTraitsInScope, false)
|
||||
}
|
||||
|
||||
hir::ItemUse(..) |
|
||||
hir::ItemExternCrate(..) |
|
||||
hir::ItemForeignMod(..) |
|
||||
hir::ItemGlobalAsm(..) |
|
||||
@ -916,14 +872,14 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::I
|
||||
hir::ItemEnum(..) |
|
||||
hir::ItemStruct(..) |
|
||||
hir::ItemUnion(..) => {
|
||||
(NodeIdHashingMode::Ignore, false)
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
let hir::Item {
|
||||
name,
|
||||
ref attrs,
|
||||
id,
|
||||
id: _,
|
||||
hir_id: _,
|
||||
ref node,
|
||||
ref vis,
|
||||
@ -932,9 +888,6 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::I
|
||||
|
||||
hcx.hash_hir_item_like(attrs, |hcx| {
|
||||
hcx.while_hashing_spans(hash_spans, |hcx| {
|
||||
hcx.with_node_id_hashing_mode(node_id_hashing_mode, |hcx| {
|
||||
id.hash_stable(hcx, hasher);
|
||||
});
|
||||
name.hash_stable(hcx, hasher);
|
||||
attrs.hash_stable(hcx, hasher);
|
||||
node.hash_stable(hcx, hasher);
|
||||
@ -980,10 +933,10 @@ impl_stable_hash_for!(struct hir::ImplItemRef {
|
||||
defaultness
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for hir::AssociatedItemKind {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -1024,19 +977,33 @@ impl_stable_hash_for!(struct hir::Arg {
|
||||
hir_id
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct hir::Body {
|
||||
arguments,
|
||||
value,
|
||||
is_generator
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::BodyId {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::Body {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
if hcx.hash_bodies() {
|
||||
hcx.tcx().hir.body(*self).hash_stable(hcx, hasher);
|
||||
}
|
||||
let hir::Body {
|
||||
ref arguments,
|
||||
ref value,
|
||||
is_generator,
|
||||
} = *self;
|
||||
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::Ignore, |hcx| {
|
||||
arguments.hash_stable(hcx, hasher);
|
||||
value.hash_stable(hcx, hasher);
|
||||
is_generator.hash_stable(hcx, hasher);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for hir::BodyId {
|
||||
type KeyType = (DefPathHash, hir::ItemLocalId);
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self,
|
||||
hcx: &StableHashingContext<'gcx>)
|
||||
-> (DefPathHash, hir::ItemLocalId) {
|
||||
let hir::BodyId { node_id } = *self;
|
||||
node_id.to_stable_hash_key(hcx)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1046,9 +1013,9 @@ impl_stable_hash_for!(struct hir::InlineAsmOutput {
|
||||
is_indirect
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::GlobalAsm {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::GlobalAsm {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::GlobalAsm {
|
||||
asm,
|
||||
@ -1059,9 +1026,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::G
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for hir::InlineAsm {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for hir::InlineAsm {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let hir::InlineAsm {
|
||||
asm,
|
||||
@ -1136,13 +1103,23 @@ impl_stable_hash_for!(enum hir::Constness {
|
||||
NotConst
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for hir::def_id::DefIndex {
|
||||
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
DefId::local(*self).hash_stable(hcx, hasher);
|
||||
hcx.local_def_path_hash(*self).hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>>
|
||||
for hir::def_id::DefIndex {
|
||||
type KeyType = DefPathHash;
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self, hcx: &StableHashingContext<'gcx>) -> DefPathHash {
|
||||
hcx.local_def_path_hash(*self)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1152,11 +1129,38 @@ impl_stable_hash_for!(struct hir::def::Export {
|
||||
span
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ::middle::lang_items::LangItem {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
_: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
_: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
::std::hash::Hash::hash(self, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ::middle::lang_items::LanguageItems {
|
||||
items,
|
||||
missing
|
||||
});
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for hir::TraitCandidate {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
|
||||
let hir::TraitCandidate {
|
||||
def_id,
|
||||
import_id,
|
||||
} = *self;
|
||||
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
import_id.hash_stable(hcx, hasher);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct hir::Freevar {
|
||||
def,
|
||||
span
|
||||
});
|
||||
|
@ -33,11 +33,11 @@ impl_stable_hash_for!(struct mir::LocalDecl<'tcx> {
|
||||
impl_stable_hash_for!(struct mir::UpvarDecl { debug_name, by_ref });
|
||||
impl_stable_hash_for!(struct mir::BasicBlockData<'tcx> { statements, terminator, is_cleanup });
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::Terminator<'gcx> {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let mir::Terminator {
|
||||
ref kind,
|
||||
@ -76,61 +76,61 @@ for mir::Terminator<'gcx> {
|
||||
}
|
||||
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Local {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Local {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::BasicBlock {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::BasicBlock {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Field {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Field {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::VisibilityScope {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Promoted {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Promoted {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use rustc_data_structures::indexed_vec::Idx;
|
||||
self.index().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::TerminatorKind<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
@ -196,10 +196,10 @@ for mir::TerminatorKind<'gcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::AssertMessage<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
@ -219,10 +219,10 @@ for mir::AssertMessage<'gcx> {
|
||||
|
||||
impl_stable_hash_for!(struct mir::Statement<'tcx> { source_info, kind });
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::StatementKind<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
@ -256,12 +256,12 @@ for mir::StatementKind<'gcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::ValidationOperand<'gcx, T>
|
||||
where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
where T: HashStable<StableHashingContext<'gcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>)
|
||||
{
|
||||
self.lval.hash_stable(hcx, hasher);
|
||||
@ -273,9 +273,9 @@ impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
|
||||
impl_stable_hash_for!(enum mir::ValidationOp { Acquire, Release, Suspend(region_scope) });
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Lvalue<'gcx> {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Lvalue<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -292,14 +292,14 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::L
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx, B, V, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx, B, V, T> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::Projection<'gcx, B, V, T>
|
||||
where B: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
where B: HashStable<StableHashingContext<'gcx>>,
|
||||
V: HashStable<StableHashingContext<'gcx>>,
|
||||
T: HashStable<StableHashingContext<'gcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let mir::Projection {
|
||||
ref base,
|
||||
@ -311,13 +311,13 @@ for mir::Projection<'gcx, B, V, T>
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx, V, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx, V, T> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::ProjectionElem<'gcx, V, T>
|
||||
where V: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
where V: HashStable<StableHashingContext<'gcx>>,
|
||||
T: HashStable<StableHashingContext<'gcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -348,9 +348,9 @@ for mir::ProjectionElem<'gcx, V, T>
|
||||
|
||||
impl_stable_hash_for!(struct mir::VisibilityScopeData { span, parent_scope });
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Operand<'gcx> {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Operand<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
@ -365,9 +365,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::O
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Rvalue<'gcx> {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Rvalue<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
@ -425,10 +425,10 @@ impl_stable_hash_for!(enum mir::CastKind {
|
||||
Unsize
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for mir::AggregateKind<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -487,9 +487,9 @@ impl_stable_hash_for!(enum mir::NullOp {
|
||||
|
||||
impl_stable_hash_for!(struct mir::Constant<'tcx> { span, ty, literal });
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for mir::Literal<'gcx> {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for mir::Literal<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
|
26
src/librustc/ich/impls_misc.rs
Normal file
@ -0,0 +1,26 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! This module contains `HashStable` implementations for various data types
//! that don't fit into any of the other impls_xxx modules.

impl_stable_hash_for!(enum ::session::search_paths::PathKind {
Native,
Crate,
Dependency,
Framework,
ExternFlag,
All
});

impl_stable_hash_for!(enum ::rustc_back::PanicStrategy {
Abort,
Unwind
});
|
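The two `impl_stable_hash_for!` invocations above generate `HashStable` impls that hash an enum's discriminant followed by any per-variant fields (the macro itself is updated later in this diff). Below is a minimal, self-contained sketch of that expansion pattern; it uses `std::hash` as a stand-in for rustc's `StableHasher`, and the `hash_stable` method shown here is illustrative rather than rustc's real trait method:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem;

#[allow(dead_code)]
enum PathKind { Native, Crate, Dependency, Framework, ExternFlag, All }

impl PathKind {
    // Roughly what the macro-generated impl does for a fieldless enum:
    // hash the variant's discriminant (per-variant fields, if any, would follow).
    fn hash_stable<H: Hasher>(&self, hasher: &mut H) {
        mem::discriminant(self).hash(hasher);
    }
}

fn main() {
    let mut hasher = DefaultHasher::new();
    PathKind::Crate.hash_stable(&mut hasher);
    println!("fingerprint: {:016x}", hasher.finish());
}
```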
@ -18,35 +18,57 @@ use std::mem;
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax::tokenstream;
|
||||
use syntax_pos::{Span, FileMap};
|
||||
|
||||
use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
|
||||
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
|
||||
StableHasher, StableHasherResult};
|
||||
use rustc_data_structures::accumulate_vec::AccumulateVec;
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
for ::syntax::symbol::InternedString {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for InternedString {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let s: &str = &**self;
|
||||
s.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::Name {
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for InternedString {
|
||||
type KeyType = InternedString;
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self,
|
||||
_: &StableHashingContext<'gcx>)
|
||||
-> InternedString {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Name {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.as_str().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for ast::Name {
|
||||
type KeyType = InternedString;
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self,
|
||||
_: &StableHashingContext<'gcx>)
|
||||
-> InternedString {
|
||||
self.as_str()
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum ::syntax::ast::AsmDialect {
|
||||
Att,
|
||||
Intel
|
||||
@ -88,10 +110,10 @@ impl_stable_hash_for!(struct ::syntax::attr::Stability {
|
||||
rustc_const_unstable
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ::syntax::attr::StabilityLevel {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -143,10 +165,15 @@ impl_stable_hash_for!(struct ::syntax::ast::Lifetime { id, span, ident });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::StrStyle { Cooked, Raw(pounds) });
|
||||
impl_stable_hash_for!(enum ::syntax::ast::AttrStyle { Outer, Inner });
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for [ast::Attribute] {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for [ast::Attribute] {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
if self.len() == 0 {
|
||||
self.len().hash_stable(hcx, hasher);
|
||||
return
|
||||
}
|
||||
|
||||
// Some attributes are always ignored during hashing.
|
||||
let filtered: AccumulateVec<[&ast::Attribute; 8]> = self
|
||||
.iter()
|
||||
@ -163,9 +190,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for [ast::
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::Attribute {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ast::Attribute {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
// Make sure that these have been filtered out.
|
||||
debug_assert!(self.name().map(|name| !hcx.is_ignored_attr(name)).unwrap_or(true));
|
||||
@ -192,10 +219,10 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ast::A
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for tokenstream::TokenTree {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -214,10 +241,10 @@ for tokenstream::TokenTree {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for tokenstream::TokenStream {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
for sub_tt in self.trees() {
|
||||
sub_tt.hash_stable(hcx, hasher);
|
||||
@ -225,10 +252,10 @@ for tokenstream::TokenStream {
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_token<'a, 'gcx, 'tcx, W: StableHasherResult>(token: &token::Token,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
error_reporting_span: Span) {
|
||||
fn hash_token<'gcx, W: StableHasherResult>(token: &token::Token,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>,
|
||||
error_reporting_span: Span) {
|
||||
mem::discriminant(token).hash_stable(hcx, hasher);
|
||||
match *token {
|
||||
token::Token::Eq |
|
||||
@ -297,11 +324,11 @@ fn hash_token<'a, 'gcx, 'tcx, W: StableHasherResult>(token: &token::Token,
|
||||
// in a stable way, in addition to the HIR.
|
||||
// Since this is hardly used anywhere, just emit a
|
||||
// warning for now.
|
||||
if hcx.tcx().sess.opts.debugging_opts.incremental.is_some() {
|
||||
if hcx.sess().opts.debugging_opts.incremental.is_some() {
|
||||
let msg = format!("Quasi-quoting might make incremental \
|
||||
compilation very inefficient: {:?}",
|
||||
non_terminal);
|
||||
hcx.tcx().sess.span_warn(error_reporting_span, &msg[..]);
|
||||
hcx.sess().span_warn(error_reporting_span, &msg[..]);
|
||||
}
|
||||
|
||||
std_hash::Hash::hash(non_terminal, hasher);
|
||||
@ -331,9 +358,9 @@ impl_stable_hash_for!(enum ::syntax::ast::MetaItemKind {
|
||||
NameValue(lit)
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for FileMap {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for FileMap {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let FileMap {
|
||||
ref name,
|
||||
|
@ -11,38 +11,39 @@
|
||||
//! This module contains `HashStable` implementations for various data types
|
||||
//! from rustc::ty in no particular order.
|
||||
|
||||
use ich::StableHashingContext;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use ich::{StableHashingContext, NodeIdHashingMode};
|
||||
use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
|
||||
StableHasher, StableHasherResult};
|
||||
use std::hash as std_hash;
|
||||
use std::mem;
|
||||
use middle::region;
|
||||
use traits;
|
||||
use ty;
|
||||
|
||||
impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
|
||||
for &'gcx ty::Slice<T>
|
||||
where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>> {
|
||||
where T: HashStable<StableHashingContext<'gcx>> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
(&self[..]).hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::subst::Kind<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.as_type().hash_stable(hcx, hasher);
|
||||
self.as_region().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::RegionKind {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -80,10 +81,10 @@ for ty::RegionKind {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::adjustment::AutoBorrow<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -98,10 +99,10 @@ for ty::adjustment::AutoBorrow<'gcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::adjustment::Adjust<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -123,19 +124,20 @@ for ty::adjustment::Adjust<'gcx> {
|
||||
|
||||
impl_stable_hash_for!(struct ty::adjustment::Adjustment<'tcx> { kind, target });
|
||||
impl_stable_hash_for!(struct ty::adjustment::OverloadedDeref<'tcx> { region, mutbl });
|
||||
impl_stable_hash_for!(struct ty::UpvarId { var_id, closure_expr_id });
|
||||
impl_stable_hash_for!(struct ty::UpvarBorrow<'tcx> { kind, region });
|
||||
|
||||
impl_stable_hash_for!(struct ty::UpvarId { var_id, closure_expr_id });
|
||||
|
||||
impl_stable_hash_for!(enum ty::BorrowKind {
|
||||
ImmBorrow,
|
||||
UniqueImmBorrow,
|
||||
MutBorrow
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::UpvarCapture<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -159,11 +161,11 @@ impl_stable_hash_for!(struct ty::FnSig<'tcx> {
|
||||
abi
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Binder<T>
|
||||
where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx, T> HashStable<StableHashingContext<'gcx>> for ty::Binder<T>
|
||||
where T: HashStable<StableHashingContext<'gcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::Binder(ref inner) = *self;
|
||||
inner.hash_stable(hcx, hasher);
|
||||
@ -183,13 +185,13 @@ impl_stable_hash_for!(struct ty::TraitPredicate<'tcx> { trait_ref });
|
||||
impl_stable_hash_for!(tuple_struct ty::EquatePredicate<'tcx> { t1, t2 });
|
||||
impl_stable_hash_for!(struct ty::SubtypePredicate<'tcx> { a_is_expected, a, b });
|
||||
|
||||
impl<'a, 'gcx, 'tcx, A, B> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx, A, B> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::OutlivesPredicate<A, B>
|
||||
where A: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
B: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>,
|
||||
where A: HashStable<StableHashingContext<'gcx>>,
|
||||
B: HashStable<StableHashingContext<'gcx>>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::OutlivesPredicate(ref a, ref b) = *self;
|
||||
a.hash_stable(hcx, hasher);
|
||||
@ -201,9 +203,9 @@ impl_stable_hash_for!(struct ty::ProjectionPredicate<'tcx> { projection_ty, ty }
|
||||
impl_stable_hash_for!(struct ty::ProjectionTy<'tcx> { substs, item_def_id });
|
||||
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Predicate<'gcx> {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::Predicate<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -243,9 +245,9 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Pr
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::AdtFlags {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::AdtFlags {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
_: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
_: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
std_hash::Hash::hash(self, hasher);
|
||||
}
|
||||
@ -270,10 +272,10 @@ impl_stable_hash_for!(struct ty::FieldDef {
|
||||
vis
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ::middle::const_val::ConstVal<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use middle::const_val::ConstVal::*;
|
||||
use middle::const_val::ConstAggregate::*;
|
||||
@ -304,7 +306,9 @@ for ::middle::const_val::ConstVal<'gcx> {
|
||||
}
|
||||
Function(def_id, substs) => {
|
||||
def_id.hash_stable(hcx, hasher);
|
||||
substs.hash_stable(hcx, hasher);
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
|
||||
substs.hash_stable(hcx, hasher);
|
||||
});
|
||||
}
|
||||
Aggregate(Struct(ref name_values)) => {
|
||||
let mut values = name_values.to_vec();
|
||||
@ -338,6 +342,54 @@ impl_stable_hash_for!(struct ty::Const<'tcx> {
|
||||
val
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ::middle::const_val::ConstEvalErr<'tcx> {
|
||||
span,
|
||||
kind
|
||||
});
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ::middle::const_val::ErrKind<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use middle::const_val::ErrKind::*;
|
||||
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
CannotCast |
|
||||
MissingStructField |
|
||||
NonConstPath |
|
||||
ExpectedConstTuple |
|
||||
ExpectedConstStruct |
|
||||
IndexedNonVec |
|
||||
IndexNotUsize |
|
||||
MiscBinaryOp |
|
||||
MiscCatchAll |
|
||||
IndexOpFeatureGated |
|
||||
TypeckError => {
|
||||
// nothing to do
|
||||
}
|
||||
UnimplementedConstVal(s) => {
|
||||
s.hash_stable(hcx, hasher);
|
||||
}
|
||||
IndexOutOfBounds { len, index } => {
|
||||
len.hash_stable(hcx, hasher);
|
||||
index.hash_stable(hcx, hasher);
|
||||
}
|
||||
Math(ref const_math_err) => {
|
||||
const_math_err.hash_stable(hcx, hasher);
|
||||
}
|
||||
LayoutError(ref layout_error) => {
|
||||
layout_error.hash_stable(hcx, hasher);
|
||||
}
|
||||
ErroneousReferencedConstant(ref const_val) => {
|
||||
const_val.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs });
|
||||
|
||||
impl_stable_hash_for!(struct ty::GeneratorInterior<'tcx> { witness });
|
||||
@ -358,9 +410,9 @@ impl_stable_hash_for!(enum ty::adjustment::CustomCoerceUnsized {
|
||||
Struct(index)
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Generics {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::Generics {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::Generics {
|
||||
parent,
|
||||
@ -386,10 +438,10 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::Ge
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::RegionParameterDef {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::RegionParameterDef {
|
||||
name,
|
||||
@ -414,13 +466,12 @@ impl_stable_hash_for!(struct ty::TypeParameterDef {
|
||||
pure_wrt_drop
|
||||
});
|
||||
|
||||
|
||||
impl<'a, 'gcx, 'tcx, T> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
|
||||
for ::middle::resolve_lifetime::Set1<T>
|
||||
where T: HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
where T: HashStable<StableHashingContext<'gcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use middle::resolve_lifetime::Set1;
|
||||
|
||||
@ -463,26 +514,20 @@ impl_stable_hash_for!(enum ty::cast::CastKind {
FnPtrAddrCast
});

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
for region::Scope
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
region::Scope::Node(node_id) |
region::Scope::Destruction(node_id) => {
node_id.hash_stable(hcx, hasher);
}
region::Scope::CallSite(body_id) |
region::Scope::Arguments(body_id) => {
body_id.hash_stable(hcx, hasher);
}
region::Scope::Remainder(block_remainder) => {
block_remainder.hash_stable(hcx, hasher);
}
}
impl_stable_hash_for!(enum ::middle::region::Scope {
Node(local_id),
Destruction(local_id),
CallSite(local_id),
Arguments(local_id),
Remainder(block_remainder)
});

impl<'gcx> ToStableHashKey<StableHashingContext<'gcx>> for region::Scope {
type KeyType = region::Scope;

#[inline]
fn to_stable_hash_key(&self, _: &StableHashingContext<'gcx>) -> region::Scope {
*self
}
}
||||
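The new `ToStableHashKey` impl above lets `region::Scope` serve as its own key, so maps keyed by scopes can be hashed by visiting their entries in a stable order. A rough, self-contained sketch of that idea follows; the `ToStableKey` trait and `hash_map_stably` helper are illustrative stand-ins, not rustc's actual API:

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

// Stand-in for rustc's ToStableHashKey: project a value onto a key with a
// deterministic, process-independent ordering.
trait ToStableKey {
    type KeyType: Ord + Hash;
    fn to_stable_key(&self) -> Self::KeyType;
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct Scope(u32);

// Scope is plain data, so it can simply act as its own key.
impl ToStableKey for Scope {
    type KeyType = Scope;
    fn to_stable_key(&self) -> Scope { *self }
}

// Hash a map by visiting entries sorted by their stable keys, so the result
// does not depend on the map's internal (randomized) iteration order.
fn hash_map_stably<K, V, H>(map: &HashMap<K, V>, hasher: &mut H)
    where K: Eq + Hash + ToStableKey, V: Hash, H: Hasher
{
    let mut entries: Vec<_> = map.iter().collect();
    entries.sort_by_key(|&(k, _)| k.to_stable_key());
    for (k, v) in entries {
        k.to_stable_key().hash(hasher);
        v.hash(hasher);
    }
}

fn main() {
    let mut scopes = HashMap::new();
    scopes.insert(Scope(2), "destruction");
    scopes.insert(Scope(1), "node");
    let mut hasher = DefaultHasher::new();
    hash_map_stably(&scopes, &mut hasher);
    println!("stable fingerprint: {:016x}", hasher.finish());
}
```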
@ -507,11 +552,11 @@ impl_stable_hash_for!(enum ty::BoundRegion {
|
||||
BrEnv
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::TypeVariants<'gcx>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use ty::TypeVariants::*;
|
||||
|
||||
@ -520,6 +565,7 @@ for ty::TypeVariants<'gcx>
|
||||
TyBool |
|
||||
TyChar |
|
||||
TyStr |
|
||||
TyError |
|
||||
TyNever => {
|
||||
// Nothing more to hash.
|
||||
}
|
||||
@ -585,10 +631,8 @@ for ty::TypeVariants<'gcx>
|
||||
TyParam(param_ty) => {
|
||||
param_ty.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
TyError |
|
||||
TyInfer(..) => {
|
||||
bug!("ty::TypeVariants::hash_stable() - Unexpected variant.")
|
||||
bug!("ty::TypeVariants::hash_stable() - Unexpected variant {:?}.", *self)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -604,11 +648,11 @@ impl_stable_hash_for!(struct ty::TypeAndMut<'tcx> {
|
||||
mutbl
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>>
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::ExistentialPredicate<'gcx>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
match *self {
|
||||
@ -636,34 +680,14 @@ impl_stable_hash_for!(struct ty::ExistentialProjection<'tcx> {
|
||||
ty
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ty::fast_reject::SimplifiedType {
|
||||
BoolSimplifiedType,
|
||||
CharSimplifiedType,
|
||||
IntSimplifiedType(int_ty),
|
||||
UintSimplifiedType(int_ty),
|
||||
FloatSimplifiedType(float_ty),
|
||||
AdtSimplifiedType(def_id),
|
||||
StrSimplifiedType,
|
||||
ArraySimplifiedType,
|
||||
PtrSimplifiedType,
|
||||
NeverSimplifiedType,
|
||||
TupleSimplifiedType(size),
|
||||
TraitSimplifiedType(def_id),
|
||||
ClosureSimplifiedType(def_id),
|
||||
GeneratorSimplifiedType(def_id),
|
||||
AnonSimplifiedType(def_id),
|
||||
FunctionSimplifiedType(params),
|
||||
ParameterSimplifiedType
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ty::Instance<'tcx> {
|
||||
def,
|
||||
substs
|
||||
});
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::InstanceDef<'gcx> {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::InstanceDef<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
@ -697,3 +721,127 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::In
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::TraitDef {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::TraitDef {
|
||||
// We already have the def_path_hash below, no need to hash it twice
|
||||
def_id: _,
|
||||
unsafety,
|
||||
paren_sugar,
|
||||
has_default_impl,
|
||||
def_path_hash,
|
||||
} = *self;
|
||||
|
||||
unsafety.hash_stable(hcx, hasher);
|
||||
paren_sugar.hash_stable(hcx, hasher);
|
||||
has_default_impl.hash_stable(hcx, hasher);
|
||||
def_path_hash.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::Destructor {
|
||||
did
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ty::DtorckConstraint<'tcx> {
|
||||
outlives,
|
||||
dtorck_types
|
||||
});
|
||||
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::CrateVariancesMap {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::CrateVariancesMap {
|
||||
ref dependencies,
|
||||
ref variances,
|
||||
// This is just an irrelevant helper value.
|
||||
empty_variance: _,
|
||||
} = *self;
|
||||
|
||||
dependencies.hash_stable(hcx, hasher);
|
||||
variances.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::AssociatedItem {
|
||||
def_id,
|
||||
name,
|
||||
kind,
|
||||
vis,
|
||||
defaultness,
|
||||
container,
|
||||
method_has_self_argument
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ty::AssociatedKind {
|
||||
Const,
|
||||
Method,
|
||||
Type
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ty::AssociatedItemContainer {
|
||||
TraitContainer(def_id),
|
||||
ImplContainer(def_id)
|
||||
});
|
||||
|
||||
|
||||
impl<'gcx, T> HashStable<StableHashingContext<'gcx>>
|
||||
for ty::steal::Steal<T>
|
||||
where T: HashStable<StableHashingContext<'gcx>>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.borrow().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::ParamEnv<'tcx> {
|
||||
caller_bounds,
|
||||
reveal
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum traits::Reveal {
|
||||
UserFacing,
|
||||
All
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ::middle::privacy::AccessLevel {
|
||||
Reachable,
|
||||
Exported,
|
||||
Public
|
||||
});
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>>
|
||||
for ::middle::privacy::AccessLevels {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
|
||||
let ::middle::privacy::AccessLevels {
|
||||
ref map
|
||||
} = *self;
|
||||
|
||||
map.hash_stable(hcx, hasher);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ty::CrateInherentImpls {
|
||||
inherent_impls
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ::session::CompileIncomplete {
|
||||
Stopped,
|
||||
Errored(error_reported)
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ::util::common::ErrorReported {});
|
||||
|
||||
impl_stable_hash_for!(tuple_struct ::middle::reachable::ReachableSet {
|
||||
reachable_set
|
||||
});
|
||||
|
@ -12,9 +12,8 @@

pub use self::fingerprint::Fingerprint;
pub use self::caching_codemap_view::CachingCodemapView;
pub use self::hcx::{StableHashingContext, NodeIdHashingMode, hash_stable_hashmap,
hash_stable_hashset, hash_stable_nodemap,
hash_stable_btreemap, hash_stable_itemlocalmap};
pub use self::hcx::{StableHashingContext, NodeIdHashingMode,
hash_stable_trait_impls};
mod fingerprint;
mod caching_codemap_view;
mod hcx;
@ -23,6 +22,7 @@ mod impls_const_math;
mod impls_cstore;
mod impls_hir;
mod impls_mir;
mod impls_misc;
mod impls_ty;
mod impls_syntax;
|
@ -245,10 +245,8 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> {
|
||||
// region at the right depth with the same index
|
||||
(Some(rl::Region::EarlyBound(_, id)), ty::BrNamed(def_id, _)) => {
|
||||
debug!("EarlyBound self.infcx.tcx.hir.local_def_id(id)={:?} \
|
||||
def_id={:?}",
|
||||
self.infcx.tcx.hir.local_def_id(id),
|
||||
def_id);
|
||||
if self.infcx.tcx.hir.local_def_id(id) == def_id {
|
||||
def_id={:?}", id, def_id);
|
||||
if id == def_id {
|
||||
self.found_type = Some(arg);
|
||||
return; // we can stop visiting now
|
||||
}
|
||||
@ -260,11 +258,9 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for FindNestedTypeVisitor<'a, 'gcx, 'tcx> {
|
||||
(Some(rl::Region::LateBound(debruijn_index, id)), ty::BrNamed(def_id, _)) => {
|
||||
debug!("FindNestedTypeVisitor::visit_ty: LateBound depth = {:?}",
|
||||
debruijn_index.depth);
|
||||
debug!("self.infcx.tcx.hir.local_def_id(id)={:?}",
|
||||
self.infcx.tcx.hir.local_def_id(id));
|
||||
debug!("self.infcx.tcx.hir.local_def_id(id)={:?}", id);
|
||||
debug!("def_id={:?}", def_id);
|
||||
if debruijn_index.depth == self.depth &&
|
||||
self.infcx.tcx.hir.local_def_id(id) == def_id {
|
||||
if debruijn_index.depth == self.depth && id == def_id {
|
||||
self.found_type = Some(arg);
|
||||
return; // we can stop visiting now
|
||||
}
|
||||
@ -336,10 +332,8 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> {
|
||||
|
||||
(Some(rl::Region::EarlyBound(_, id)), ty::BrNamed(def_id, _)) => {
|
||||
debug!("EarlyBound self.infcx.tcx.hir.local_def_id(id)={:?} \
|
||||
def_id={:?}",
|
||||
self.infcx.tcx.hir.local_def_id(id),
|
||||
def_id);
|
||||
if self.infcx.tcx.hir.local_def_id(id) == def_id {
|
||||
def_id={:?}", id, def_id);
|
||||
if id == def_id {
|
||||
self.found_it = true;
|
||||
return; // we can stop visiting now
|
||||
}
|
||||
@ -348,11 +342,9 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for TyPathVisitor<'a, 'gcx, 'tcx> {
|
||||
(Some(rl::Region::LateBound(debruijn_index, id)), ty::BrNamed(def_id, _)) => {
|
||||
debug!("FindNestedTypeVisitor::visit_ty: LateBound depth = {:?}",
|
||||
debruijn_index.depth);
|
||||
debug!("self.infcx.tcx.hir.local_def_id(id)={:?}",
|
||||
self.infcx.tcx.hir.local_def_id(id));
|
||||
debug!("id={:?}", id);
|
||||
debug!("def_id={:?}", def_id);
|
||||
if debruijn_index.depth == self.depth &&
|
||||
self.infcx.tcx.hir.local_def_id(id) == def_id {
|
||||
if debruijn_index.depth == self.depth && id == def_id {
|
||||
self.found_it = true;
|
||||
return; // we can stop visiting now
|
||||
}
|
||||
|
@ -12,9 +12,12 @@ use std::cmp;
|
||||
|
||||
use errors::DiagnosticBuilder;
|
||||
use hir::HirId;
|
||||
use ich::StableHashingContext;
|
||||
use lint::builtin;
|
||||
use lint::context::CheckLintNameResult;
|
||||
use lint::{self, Lint, LintId, Level, LintSource};
|
||||
use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
|
||||
StableHasher, StableHasherResult};
|
||||
use session::Session;
|
||||
use syntax::ast;
|
||||
use syntax::attr;
|
||||
@ -382,3 +385,62 @@ impl LintLevelMap {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for LintLevelMap {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let LintLevelMap {
|
||||
ref sets,
|
||||
ref id_to_set,
|
||||
} = *self;
|
||||
|
||||
id_to_set.hash_stable(hcx, hasher);
|
||||
|
||||
let LintLevelSets {
|
||||
ref list,
|
||||
lint_cap,
|
||||
} = *sets;
|
||||
|
||||
lint_cap.hash_stable(hcx, hasher);
|
||||
|
||||
hcx.while_hashing_spans(true, |hcx| {
|
||||
list.len().hash_stable(hcx, hasher);
|
||||
|
||||
// We are working under the assumption here that the list of
|
||||
// lint-sets is built in a deterministic order.
|
||||
for lint_set in list {
|
||||
::std::mem::discriminant(lint_set).hash_stable(hcx, hasher);
|
||||
|
||||
match *lint_set {
|
||||
LintSet::CommandLine { ref specs } => {
|
||||
specs.hash_stable(hcx, hasher);
|
||||
}
|
||||
LintSet::Node { ref specs, parent } => {
|
||||
specs.hash_stable(hcx, hasher);
|
||||
parent.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<HCX> HashStable<HCX> for LintId {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.lint_name_raw().hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<HCX> ToStableHashKey<HCX> for LintId {
|
||||
type KeyType = &'static str;
|
||||
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self, _: &HCX) -> &'static str {
|
||||
self.lint_name_raw()
|
||||
}
|
||||
}
|
||||
|
@ -305,6 +305,10 @@ impl LintId {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lint_name_raw(&self) -> &'static str {
|
||||
self.lint.name
|
||||
}
|
||||
|
||||
/// Get the name of the lint.
|
||||
pub fn to_string(&self) -> String {
|
||||
self.lint.name_lower()
|
||||
@ -317,6 +321,13 @@ pub enum Level {
|
||||
Allow, Warn, Deny, Forbid
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum self::Level {
|
||||
Allow,
|
||||
Warn,
|
||||
Deny,
|
||||
Forbid
|
||||
});
|
||||
|
||||
impl Level {
|
||||
/// Convert a level to a lower-case string.
|
||||
pub fn as_str(self) -> &'static str {
|
||||
@ -354,6 +365,12 @@ pub enum LintSource {
|
||||
CommandLine(Symbol),
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum self::LintSource {
|
||||
Default,
|
||||
Node(name, span),
|
||||
CommandLine(text)
|
||||
});
|
||||
|
||||
pub type LevelSource = (Level, LintSource);
|
||||
|
||||
pub mod builtin;
|
||||
|
@ -73,10 +73,10 @@ macro_rules! __impl_stable_hash_field {
|
||||
#[macro_export]
|
||||
macro_rules! impl_stable_hash_for {
|
||||
(enum $enum_name:path { $( $variant:ident $( ( $($arg:ident),* ) )* ),* }) => {
|
||||
impl<'a, 'tcx, 'lcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx, 'lcx>> for $enum_name {
|
||||
impl<'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'tcx>> for $enum_name {
|
||||
#[inline]
|
||||
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
|
||||
__ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx, 'lcx>,
|
||||
__ctx: &mut $crate::ich::StableHashingContext<'tcx>,
|
||||
__hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
|
||||
use $enum_name::*;
|
||||
::std::mem::discriminant(self).hash_stable(__ctx, __hasher);
|
||||
@ -92,10 +92,10 @@ macro_rules! impl_stable_hash_for {
|
||||
}
|
||||
};
|
||||
(struct $struct_name:path { $($field:ident),* }) => {
|
||||
impl<'a, 'tcx, 'lcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx, 'lcx>> for $struct_name {
|
||||
impl<'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'tcx>> for $struct_name {
|
||||
#[inline]
|
||||
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
|
||||
__ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx, 'lcx>,
|
||||
__ctx: &mut $crate::ich::StableHashingContext<'tcx>,
|
||||
__hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
|
||||
let $struct_name {
|
||||
$(ref $field),*
|
||||
@ -106,10 +106,10 @@ macro_rules! impl_stable_hash_for {
|
||||
}
|
||||
};
|
||||
(tuple_struct $struct_name:path { $($field:ident),* }) => {
|
||||
impl<'a, 'tcx, 'lcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a, 'tcx, 'lcx>> for $struct_name {
|
||||
impl<'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'tcx>> for $struct_name {
|
||||
#[inline]
|
||||
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
|
||||
__ctx: &mut $crate::ich::StableHashingContext<'a, 'tcx, 'lcx>,
|
||||
__ctx: &mut $crate::ich::StableHashingContext<'tcx>,
|
||||
__hasher: &mut ::rustc_data_structures::stable_hasher::StableHasher<W>) {
|
||||
let $struct_name (
|
||||
$(ref $field),*
|
||||
@ -125,11 +125,11 @@ macro_rules! impl_stable_hash_for {
|
||||
macro_rules! impl_stable_hash_for_spanned {
|
||||
($T:path) => (
|
||||
|
||||
impl<'a, 'tcx, 'lcx> HashStable<StableHashingContext<'a, 'tcx, 'lcx>> for ::syntax::codemap::Spanned<$T>
|
||||
impl<'tcx> HashStable<StableHashingContext<'tcx>> for ::syntax::codemap::Spanned<$T>
|
||||
{
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'tcx, 'lcx>,
|
||||
hcx: &mut StableHashingContext<'tcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.node.hash_stable(hcx, hasher);
|
||||
self.span.hash_stable(hcx, hasher);
|
||||
|
@ -22,6 +22,7 @@
|
||||
//! are *mostly* used as a part of that interface, but these should
|
||||
//! probably get a better home if someone can find one.
|
||||
|
||||
use hir;
|
||||
use hir::def;
|
||||
use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
|
||||
use hir::map as hir_map;
|
||||
@ -34,6 +35,7 @@ use session::search_paths::PathKind;
|
||||
use util::nodemap::NodeSet;
|
||||
|
||||
use std::any::Any;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use owning_ref::ErasedBoxRef;
|
||||
@ -132,7 +134,7 @@ pub struct NativeLibrary {
|
||||
pub kind: NativeLibraryKind,
|
||||
pub name: Symbol,
|
||||
pub cfg: Option<ast::MetaItem>,
|
||||
pub foreign_items: Vec<DefIndex>,
|
||||
pub foreign_items: Vec<DefId>,
|
||||
}
|
||||
|
||||
pub enum LoadedMacro {
|
||||
@ -218,6 +220,26 @@ pub trait MetadataLoader {
|
||||
-> Result<ErasedBoxRef<[u8]>, String>;
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ExternConstBody<'tcx> {
|
||||
pub body: &'tcx hir::Body,
|
||||
|
||||
// It would require a lot of infrastructure to enable stable-hashing Bodies
|
||||
// from other crates, so we hash on export and just store the fingerprint
|
||||
// with them.
|
||||
pub fingerprint: ich::Fingerprint,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ExternBodyNestedBodies {
|
||||
pub nested_bodies: Rc<BTreeMap<hir::BodyId, hir::Body>>,
|
||||
|
||||
// It would require a lot of infrastructure to enable stable-hashing Bodies
|
||||
// from other crates, so we hash on export and just store the fingerprint
|
||||
// with them.
|
||||
pub fingerprint: ich::Fingerprint,
|
||||
}
|
||||
|
||||
/// A store of Rust crates, through with their metadata
|
||||
/// can be accessed.
|
||||
///
|
||||
|
@ -19,6 +19,11 @@ pub enum SymbolExportLevel {
Rust,
}

impl_stable_hash_for!(enum self::SymbolExportLevel {
C,
Rust
});

impl SymbolExportLevel {
pub fn is_below_threshold(self, threshold: SymbolExportLevel) -> bool {
if threshold == SymbolExportLevel::Rust {
|
@ -192,8 +192,7 @@ pub fn extract(attrs: &[ast::Attribute]) -> Option<Symbol> {
pub fn collect<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> LanguageItems {
let mut collector = LanguageItemCollector::new(tcx);
for &cnum in tcx.crates().iter() {
for &(index, item_index) in tcx.defined_lang_items(cnum).iter() {
let def_id = DefId { krate: cnum, index: index };
for &(def_id, item_index) in tcx.defined_lang_items(cnum).iter() {
collector.collect_item(item_index, def_id);
}
}
|
@ -369,7 +369,13 @@ impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a,
}
}

fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> Rc<NodeSet> {
// We introduce a new-type here, so we can have a specialized HashStable
// implementation for it.
#[derive(Clone)]
pub struct ReachableSet(pub Rc<NodeSet>);

fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> ReachableSet {
debug_assert!(crate_num == LOCAL_CRATE);

let access_levels = &tcx.privacy_access_levels(LOCAL_CRATE);
@ -414,7 +420,7 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) ->
reachable_context.propagate();

// Return the set of reachable symbols.
Rc::new(reachable_context.reachable_symbols)
ReachableSet(Rc::new(reachable_context.reachable_symbols))
}

pub fn provide(providers: &mut Providers) {
|
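The `ReachableSet` new-type introduced above wraps `Rc<NodeSet>` purely so that a dedicated `HashStable` implementation can be attached to this particular query result. The snippet below is a small, self-contained illustration of that new-type pattern; the `StableHash` trait and the concrete types used are illustrative stand-ins, not rustc's:

```rust
use std::collections::BTreeSet;
use std::rc::Rc;

// A hashing trait in the spirit of HashStable: fold a value into a fingerprint.
trait StableHash {
    fn stable_hash(&self, out: &mut Vec<u64>);
}

// Wrapping the shared set in a local new-type lets us give exactly this value
// its own, specialized impl without affecting Rc<BTreeSet<u64>> in general.
#[derive(Clone)]
struct ReachableSet(Rc<BTreeSet<u64>>);

impl StableHash for ReachableSet {
    fn stable_hash(&self, out: &mut Vec<u64>) {
        // A BTreeSet iterates in a deterministic order, so recording the
        // length followed by each element yields a stable fingerprint.
        out.push(self.0.len() as u64);
        out.extend(self.0.iter().cloned());
    }
}

fn main() {
    let nodes: BTreeSet<u64> = [3, 1, 2].iter().cloned().collect();
    let reachable = ReachableSet(Rc::new(nodes));
    let mut fingerprint = Vec::new();
    reachable.stable_hash(&mut fingerprint);
    println!("{:?}", fingerprint); // prints [3, 1, 2, 3]
}
```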
@ -14,6 +14,7 @@
|
||||
//! Most of the documentation on regions can be found in
|
||||
//! `middle/infer/region_inference/README.md`
|
||||
|
||||
use ich::{StableHashingContext, NodeIdHashingMode};
|
||||
use util::nodemap::{FxHashMap, FxHashSet};
|
||||
use ty;
|
||||
|
||||
@ -31,6 +32,8 @@ use hir::def_id::DefId;
|
||||
use hir::intravisit::{self, Visitor, NestedVisitorMap};
|
||||
use hir::{Block, Arm, Pat, PatKind, Stmt, Expr, Local};
|
||||
use mir::transform::MirSource;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
|
||||
/// Scope represents a statically-describable scope that can be
|
||||
/// used to bound the lifetime/region for values.
|
||||
@ -1235,3 +1238,32 @@ pub fn provide(providers: &mut Providers) {
|
||||
..*providers
|
||||
};
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ScopeTree {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ScopeTree {
|
||||
root_body,
|
||||
root_parent,
|
||||
ref parent_map,
|
||||
ref var_map,
|
||||
ref destruction_scopes,
|
||||
ref rvalue_scopes,
|
||||
ref closure_tree,
|
||||
ref yield_in_scope,
|
||||
} = *self;
|
||||
|
||||
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
|
||||
root_body.hash_stable(hcx, hasher);
|
||||
root_parent.hash_stable(hcx, hasher);
|
||||
});
|
||||
|
||||
parent_map.hash_stable(hcx, hasher);
|
||||
var_map.hash_stable(hcx, hasher);
|
||||
destruction_scopes.hash_stable(hcx, hasher);
|
||||
rvalue_scopes.hash_stable(hcx, hasher);
|
||||
closure_tree.hash_stable(hcx, hasher);
|
||||
yield_in_scope.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
@ -39,22 +39,24 @@ use hir::intravisit::{self, Visitor, NestedVisitorMap};
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub enum Region {
Static,
EarlyBound(/* index */ u32, /* lifetime decl */ ast::NodeId),
LateBound(ty::DebruijnIndex, /* lifetime decl */ ast::NodeId),
EarlyBound(/* index */ u32, /* lifetime decl */ DefId),
LateBound(ty::DebruijnIndex, /* lifetime decl */ DefId),
LateBoundAnon(ty::DebruijnIndex, /* anon index */ u32),
Free(DefId, /* lifetime decl */ ast::NodeId),
Free(DefId, /* lifetime decl */ DefId),
}

impl Region {
fn early(index: &mut u32, def: &hir::LifetimeDef) -> (ast::Name, Region) {
fn early(hir_map: &Map, index: &mut u32, def: &hir::LifetimeDef) -> (ast::Name, Region) {
let i = *index;
*index += 1;
(def.lifetime.name, Region::EarlyBound(i, def.lifetime.id))
let def_id = hir_map.local_def_id(def.lifetime.id);
(def.lifetime.name, Region::EarlyBound(i, def_id))
}

fn late(def: &hir::LifetimeDef) -> (ast::Name, Region) {
fn late(hir_map: &Map, def: &hir::LifetimeDef) -> (ast::Name, Region) {
let depth = ty::DebruijnIndex::new(1);
(def.lifetime.name, Region::LateBound(depth, def.lifetime.id))
let def_id = hir_map.local_def_id(def.lifetime.id);
(def.lifetime.name, Region::LateBound(depth, def_id))
}

fn late_anon(index: &Cell<u32>) -> Region {
@ -64,7 +66,7 @@ impl Region {
Region::LateBoundAnon(depth, i)
}

fn id(&self) -> Option<ast::NodeId> {
fn id(&self) -> Option<DefId> {
match *self {
Region::Static |
Region::LateBoundAnon(..) => None,
@ -337,7 +339,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
0
};
let lifetimes = generics.lifetimes.iter().map(|def| {
Region::early(&mut index, def)
Region::early(self.hir_map, &mut index, def)
}).collect();
let scope = Scope::Binder {
lifetimes,
@ -368,7 +370,9 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
match ty.node {
hir::TyBareFn(ref c) => {
let scope = Scope::Binder {
lifetimes: c.lifetimes.iter().map(Region::late).collect(),
lifetimes: c.lifetimes.iter().map(|def| {
Region::late(self.hir_map, def)
}).collect(),
s: self.scope
};
self.with(scope, |old_scope, this| {
@ -467,7 +471,9 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
if !bound_lifetimes.is_empty() {
self.trait_ref_hack = true;
let scope = Scope::Binder {
lifetimes: bound_lifetimes.iter().map(Region::late).collect(),
lifetimes: bound_lifetimes.iter().map(|def| {
Region::late(self.hir_map, def)
}).collect(),
s: self.scope
};
let result = self.with(scope, |old_scope, this| {
@ -512,7 +518,9 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
"nested quantification of lifetimes");
}
let scope = Scope::Binder {
lifetimes: trait_ref.bound_lifetimes.iter().map(Region::late).collect(),
lifetimes: trait_ref.bound_lifetimes.iter().map(|def| {
Region::late(self.hir_map, def)
}).collect(),
s: self.scope
};
self.with(scope, |old_scope, this| {
@ -647,10 +655,13 @@ fn extract_labels(ctxt: &mut LifetimeContext, body: &hir::Body) {
Scope::Binder { ref lifetimes, s } => {
// FIXME (#24278): non-hygienic comparison
if let Some(def) = lifetimes.get(&label) {
let node_id = hir_map.as_local_node_id(def.id().unwrap())
.unwrap();

signal_shadowing_problem(
sess,
label,
original_lifetime(hir_map.span(def.id().unwrap())),
original_lifetime(hir_map.span(node_id)),
shadower_label(label_span));
return;
}
@ -749,7 +760,8 @@ fn object_lifetime_defaults_for_item(hir_map: &Map, generics: &hir::Generics)
generics.lifetimes.iter().enumerate().find(|&(_, def)| {
def.lifetime.name == name
}).map_or(Set1::Many, |(i, def)| {
Set1::One(Region::EarlyBound(i as u32, def.lifetime.id))
let def_id = hir_map.local_def_id(def.lifetime.id);
Set1::One(Region::EarlyBound(i as u32, def_id))
})
}
}
@ -835,9 +847,9 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {

let lifetimes = generics.lifetimes.iter().map(|def| {
if self.map.late_bound.contains(&def.lifetime.id) {
Region::late(def)
Region::late(self.hir_map, def)
} else {
Region::early(&mut index, def)
Region::early(self.hir_map, &mut index, def)
}
}).collect();

@ -1483,10 +1495,14 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {

Scope::Binder { ref lifetimes, s } => {
if let Some(&def) = lifetimes.get(&lifetime.name) {
let node_id = self.hir_map
.as_local_node_id(def.id().unwrap())
.unwrap();

signal_shadowing_problem(
self.sess,
lifetime.name,
original_lifetime(self.hir_map.span(def.id().unwrap())),
original_lifetime(self.hir_map.span(node_id)),
shadower_lifetime(&lifetime));
return;
}
|
@ -65,6 +65,11 @@ pub struct DeprecationEntry {
origin: Option<HirId>,
}

impl_stable_hash_for!(struct self::DeprecationEntry {
attr,
origin
});

impl DeprecationEntry {
fn local(attr: Deprecation, id: HirId) -> DeprecationEntry {
DeprecationEntry {
@ -102,6 +107,13 @@ pub struct Index<'tcx> {
active_features: FxHashSet<Symbol>,
}

impl_stable_hash_for!(struct self::Index<'tcx> {
stab_map,
depr_map,
staged_api,
active_features
});

// A private tree-walker for producing an Index.
struct Annotator<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
@ -12,6 +12,9 @@ use syntax::ast::NodeId;
use syntax::symbol::InternedString;
use ty::Instance;
use util::nodemap::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult,
StableHasher};
use ich::{Fingerprint, StableHashingContext, NodeIdHashingMode};

#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)]
pub enum TransItem<'tcx> {
@ -20,6 +23,26 @@ pub enum TransItem<'tcx> {
GlobalAsm(NodeId),
}

impl<'tcx> HashStable<StableHashingContext<'tcx>> for TransItem<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'tcx>,
hasher: &mut StableHasher<W>) {
::std::mem::discriminant(self).hash_stable(hcx, hasher);

match *self {
TransItem::Fn(ref instance) => {
instance.hash_stable(hcx, hasher);
}
TransItem::Static(node_id) |
TransItem::GlobalAsm(node_id) => {
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
node_id.hash_stable(hcx, hasher);
})
}
}
}
}

pub struct CodegenUnit<'tcx> {
/// A name for this CGU. Incremental compilation requires that
/// name be unique amongst **all** crates. Therefore, it should
@ -44,6 +67,20 @@ pub enum Linkage {
Common,
}

impl_stable_hash_for!(enum self::Linkage {
External,
AvailableExternally,
LinkOnceAny,
LinkOnceODR,
WeakAny,
WeakODR,
Appending,
Internal,
Private,
ExternalWeak,
Common
});

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub enum Visibility {
Default,
@ -51,6 +88,12 @@ pub enum Visibility {
Protected,
}

impl_stable_hash_for!(enum self::Visibility {
Default,
Hidden,
Protected
});

impl<'tcx> CodegenUnit<'tcx> {
pub fn new(name: InternedString) -> CodegenUnit<'tcx> {
CodegenUnit {
@ -78,6 +121,29 @@ impl<'tcx> CodegenUnit<'tcx> {
}
}

impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'tcx>,
hasher: &mut StableHasher<W>) {
let CodegenUnit {
ref items,
name,
} = *self;

name.hash_stable(hcx, hasher);

let mut items: Vec<(Fingerprint, _)> = items.iter().map(|(trans_item, &attrs)| {
let mut hasher = StableHasher::new();
trans_item.hash_stable(hcx, &mut hasher);
let trans_item_fingerprint = hasher.finish();
(trans_item_fingerprint, attrs)
}).collect();

items.sort_unstable_by_key(|i| i.0);
items.hash_stable(hcx, hasher);
}
}

#[derive(Clone, Default)]
pub struct Stats {
pub n_glues_created: usize,
@ -92,6 +158,18 @@ pub struct Stats {
pub fn_stats: Vec<(String, usize)>,
}

impl_stable_hash_for!(struct self::Stats {
n_glues_created,
n_null_glues,
n_real_glues,
n_fns,
n_inlines,
n_closures,
n_llvm_insns,
llvm_insns,
fn_stats
});

impl Stats {
pub fn extend(&mut self, stats: Stats) {
self.n_glues_created += stats.n_glues_created;
@ -108,3 +186,4 @@ impl Stats {
self.fn_stats.extend(stats.fn_stats);
}
}
|
@ -35,9 +35,9 @@ impl serialize::Decodable for Cache {
}
}

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Cache {
impl<'gcx> HashStable<StableHashingContext<'gcx>> for Cache {
fn hash_stable<W: StableHasherResult>(&self,
_: &mut StableHashingContext<'a, 'gcx, 'tcx>,
_: &mut StableHashingContext<'gcx>,
_: &mut StableHasher<W>) {
// do nothing
}
|
@ -19,8 +19,10 @@ pub use self::DebugInfoLevel::*;
use session::{early_error, early_warn, Session};
use session::search_paths::SearchPaths;

use ich::StableHashingContext;
use rustc_back::{LinkerFlavor, PanicStrategy, RelroLevel};
use rustc_back::target::Target;
use rustc_data_structures::stable_hasher::ToStableHashKey;
use lint;
use middle::cstore;

@ -90,6 +92,25 @@ pub enum OutputType {
DepInfo,
}

impl_stable_hash_for!(enum self::OutputType {
Bitcode,
Assembly,
LlvmAssembly,
Mir,
Metadata,
Object,
Exe,
DepInfo
});

impl<'tcx> ToStableHashKey<StableHashingContext<'tcx>> for OutputType {
type KeyType = OutputType;
#[inline]
fn to_stable_hash_key(&self, _: &StableHashingContext<'tcx>) -> Self::KeyType {
*self
}
}

impl OutputType {
fn is_compatible_with_codegen_units_and_single_output_file(&self) -> bool {
match *self {
@ -149,6 +170,10 @@ impl Default for ErrorOutputType {
#[derive(Clone, Hash)]
pub struct OutputTypes(BTreeMap<OutputType, Option<PathBuf>>);

impl_stable_hash_for!(tuple_struct self::OutputTypes {
map
});

impl OutputTypes {
pub fn new(entries: &[(OutputType, Option<PathBuf>)]) -> OutputTypes {
OutputTypes(BTreeMap::from_iter(entries.iter()
@ -373,6 +398,14 @@ pub struct OutputFilenames {
pub outputs: OutputTypes,
}

impl_stable_hash_for!(struct self::OutputFilenames {
out_directory,
out_filestem,
single_output_file,
extra,
outputs
});

/// Codegen unit names generated by the numbered naming scheme will contain this
/// marker right before the index of the codegen unit.
pub const NUMBERED_CODEGEN_UNIT_MARKER: &'static str = ".cgu-";
@ -905,7 +938,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
debug_assertions: Option<bool> = (None, parse_opt_bool, [TRACKED],
"explicitly enable the cfg(debug_assertions) directive"),
inline_threshold: Option<usize> = (None, parse_opt_uint, [TRACKED],
"set the inlining threshold for"),
"set the threshold for inlining a function (default: 225)"),
panic: Option<PanicStrategy> = (None, parse_panic_strategy,
[TRACKED], "panic strategy to compile crate with"),
}
@ -1052,9 +1085,9 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
linker_flavor: Option<LinkerFlavor> = (None, parse_linker_flavor, [UNTRACKED],
"Linker flavor"),
fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
"Set the optimization fuel quota for a crate."),
"set the optimization fuel quota for a crate"),
print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
"Make Rustc print the total optimization fuel used by a crate."),
"make Rustc print the total optimization fuel used by a crate"),
remap_path_prefix_from: Vec<String> = (vec![], parse_string_push, [TRACKED],
"add a source pattern to the file path remapping config"),
remap_path_prefix_to: Vec<String> = (vec![], parse_string_push, [TRACKED],
|
@ -11,6 +11,9 @@
// except according to those terms.

use hir::def_id::DefId;
use ich::{self, StableHashingContext};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
StableHasherResult};
use traits;
use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
@ -365,3 +368,21 @@ pub fn ancestors(tcx: TyCtxt,
current_source: Some(Node::Impl(start_from_impl)),
}
}

impl<'gcx> HashStable<StableHashingContext<'gcx>> for Children {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let Children {
ref nonblanket_impls,
ref blanket_impls,
} = *self;

ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, nonblanket_impls);
}
}

impl_stable_hash_for!(struct self::Graph {
parent,
children
});
|
@ -21,7 +21,7 @@ use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
use hir::map as hir_map;
use hir::map::DefPathHash;
use lint::{self, Lint};
use ich::{self, StableHashingContext, NodeIdHashingMode};
use ich::{StableHashingContext, NodeIdHashingMode};
use middle::const_val::ConstVal;
use middle::cstore::{CrateStore, LinkMeta, EncodedMetadataHashes};
use middle::cstore::EncodedMetadata;
@ -49,8 +49,8 @@ use ty::BindingMode;
use util::nodemap::{NodeMap, NodeSet, DefIdSet, ItemLocalMap};
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
StableHasherResult};
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
StableHasher, StableHasherResult};

use arena::{TypedArena, DroplessArena};
use rustc_const_math::{ConstInt, ConstUsize};
@ -687,9 +687,9 @@ impl<'tcx> TypeckTables<'tcx> {
}
}

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for TypeckTables<'gcx> {
impl<'gcx> HashStable<StableHashingContext<'gcx>> for TypeckTables<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let ty::TypeckTables {
local_id_root,
@ -714,12 +714,12 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Typeck
} = *self;

hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
ich::hash_stable_itemlocalmap(hcx, hasher, type_dependent_defs);
ich::hash_stable_itemlocalmap(hcx, hasher, node_types);
ich::hash_stable_itemlocalmap(hcx, hasher, node_substs);
ich::hash_stable_itemlocalmap(hcx, hasher, adjustments);
ich::hash_stable_itemlocalmap(hcx, hasher, pat_binding_modes);
ich::hash_stable_hashmap(hcx, hasher, upvar_capture_map, |hcx, up_var_id| {
type_dependent_defs.hash_stable(hcx, hasher);
node_types.hash_stable(hcx, hasher);
node_substs.hash_stable(hcx, hasher);
adjustments.hash_stable(hcx, hasher);
pat_binding_modes.hash_stable(hcx, hasher);
hash_stable_hashmap(hcx, hasher, upvar_capture_map, |up_var_id, hcx| {
let ty::UpvarId {
var_id,
closure_expr_id
@ -736,22 +736,19 @@ impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for Typeck
krate: local_id_root.krate,
index: closure_expr_id,
};
((hcx.def_path_hash(var_owner_def_id), var_id.local_id),
(hcx.def_path_hash(var_owner_def_id),
var_id.local_id,
hcx.def_path_hash(closure_def_id))
});

ich::hash_stable_itemlocalmap(hcx, hasher, closure_tys);
ich::hash_stable_itemlocalmap(hcx, hasher, closure_kinds);
ich::hash_stable_itemlocalmap(hcx, hasher, liberated_fn_sigs);
ich::hash_stable_itemlocalmap(hcx, hasher, fru_field_types);
ich::hash_stable_itemlocalmap(hcx, hasher, cast_kinds);
ich::hash_stable_itemlocalmap(hcx, hasher, generator_sigs);
ich::hash_stable_itemlocalmap(hcx, hasher, generator_interiors);

ich::hash_stable_hashset(hcx, hasher, used_trait_imports, |hcx, def_id| {
hcx.def_path_hash(*def_id)
});

closure_tys.hash_stable(hcx, hasher);
closure_kinds.hash_stable(hcx, hasher);
liberated_fn_sigs.hash_stable(hcx, hasher);
fru_field_types.hash_stable(hcx, hasher);
cast_kinds.hash_stable(hcx, hasher);
generator_sigs.hash_stable(hcx, hasher);
generator_interiors.hash_stable(hcx, hasher);
used_trait_imports.hash_stable(hcx, hasher);
tainted_by_errors.hash_stable(hcx, hasher);
free_region_map.hash_stable(hcx, hasher);
})
@ -1083,6 +1080,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
None
};

// FIXME(mw): Each of the Vecs in the trait_map should be brought into
// a deterministic order here. Otherwise we might end up with
// unnecessarily unstable incr. comp. hashes.
let mut trait_map = FxHashMap();
for (k, v) in resolutions.trait_map {
let hir_id = hir.node_to_hir_id(k);
@ -1171,17 +1171,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
}

pub fn lang_items(self) -> Rc<middle::lang_items::LanguageItems> {
// FIXME(#42293) Right now we insert a `with_ignore` node in the dep
// graph here to ignore the fact that `get_lang_items` below depends on
// the entire crate. For now this'll prevent false positives of
// recompiling too much when anything changes.
//
// Once red/green incremental compilation lands we should be able to
// remove this because while the crate changes often the lint level map
// will change rarely.
self.dep_graph.with_ignore(|| {
self.get_lang_items(LOCAL_CRATE)
})
self.get_lang_items(LOCAL_CRATE)
}

pub fn stability(self) -> Rc<stability::Index<'tcx>> {
@ -1235,6 +1225,15 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn crate_data_as_rc_any(self, cnum: CrateNum) -> Rc<Any> {
self.cstore.crate_data_as_rc_any(cnum)
}

pub fn create_stable_hashing_context(self) -> StableHashingContext<'gcx> {
let krate = self.dep_graph.with_ignore(|| self.gcx.hir.krate());

StableHashingContext::new(self.sess,
krate,
self.hir.definitions(),
self.cstore)
}
}

impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
@ -2198,7 +2197,15 @@ pub fn provide(providers: &mut ty::maps::Providers) {
};
providers.get_lang_items = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
Rc::new(middle::lang_items::collect(tcx))
// FIXME(#42293) Right now we insert a `with_ignore` node in the dep
// graph here to ignore the fact that `get_lang_items` below depends on
// the entire crate. For now this'll prevent false positives of
// recompiling too much when anything changes.
//
// Once red/green incremental compilation lands we should be able to
// remove this because while the crate changes often the lint level map
// will change rarely.
tcx.dep_graph.with_ignore(|| Rc::new(middle::lang_items::collect(tcx)))
};
providers.freevars = |tcx, id| tcx.gcx.freevars.get(&id).cloned();
providers.maybe_unused_trait_import = |tcx, id| {
|
@ -9,29 +9,44 @@
// except according to those terms.

use hir::def_id::DefId;
use ty::{self, Ty, TyCtxt};
use ich::StableHashingContext;
use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
HashStable};
use std::fmt::Debug;
use std::hash::Hash;
use std::mem;
use syntax::ast;
use ty::{self, Ty, TyCtxt};

use self::SimplifiedType::*;
use self::SimplifiedTypeGen::*;

/// See `simplify_type
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum SimplifiedType {
pub type SimplifiedType = SimplifiedTypeGen<DefId>;

/// See `simplify_type`
///
/// Note that we keep this type generic over the type of identifier it uses
/// because we sometimes need to use SimplifiedTypeGen values as stable sorting
/// keys (in which case we use a DefPathHash as id-type) but in the general case
/// the non-stable but fast to construct DefId-version is the better choice.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum SimplifiedTypeGen<D>
where D: Copy + Debug + Ord + Eq + Hash
{
BoolSimplifiedType,
CharSimplifiedType,
IntSimplifiedType(ast::IntTy),
UintSimplifiedType(ast::UintTy),
FloatSimplifiedType(ast::FloatTy),
AdtSimplifiedType(DefId),
AdtSimplifiedType(D),
StrSimplifiedType,
ArraySimplifiedType,
PtrSimplifiedType,
NeverSimplifiedType,
TupleSimplifiedType(usize),
TraitSimplifiedType(DefId),
ClosureSimplifiedType(DefId),
GeneratorSimplifiedType(DefId),
AnonSimplifiedType(DefId),
TraitSimplifiedType(D),
ClosureSimplifiedType(D),
GeneratorSimplifiedType(D),
AnonSimplifiedType(D),
FunctionSimplifiedType(usize),
ParameterSimplifiedType,
}
@ -101,3 +116,62 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
ty::TyInfer(_) | ty::TyError => None,
}
}

impl<D: Copy + Debug + Ord + Eq + Hash> SimplifiedTypeGen<D> {
pub fn map_def<U, F>(self, map: F) -> SimplifiedTypeGen<U>
where F: Fn(D) -> U,
U: Copy + Debug + Ord + Eq + Hash,
{
match self {
BoolSimplifiedType => BoolSimplifiedType,
CharSimplifiedType => CharSimplifiedType,
IntSimplifiedType(t) => IntSimplifiedType(t),
UintSimplifiedType(t) => UintSimplifiedType(t),
FloatSimplifiedType(t) => FloatSimplifiedType(t),
AdtSimplifiedType(d) => AdtSimplifiedType(map(d)),
StrSimplifiedType => StrSimplifiedType,
ArraySimplifiedType => ArraySimplifiedType,
PtrSimplifiedType => PtrSimplifiedType,
NeverSimplifiedType => NeverSimplifiedType,
TupleSimplifiedType(n) => TupleSimplifiedType(n),
TraitSimplifiedType(d) => TraitSimplifiedType(map(d)),
ClosureSimplifiedType(d) => ClosureSimplifiedType(map(d)),
GeneratorSimplifiedType(d) => GeneratorSimplifiedType(map(d)),
AnonSimplifiedType(d) => AnonSimplifiedType(map(d)),
FunctionSimplifiedType(n) => FunctionSimplifiedType(n),
ParameterSimplifiedType => ParameterSimplifiedType,
}
}
}

impl<'gcx, D> HashStable<StableHashingContext<'gcx>> for SimplifiedTypeGen<D>
where D: Copy + Debug + Ord + Eq + Hash +
HashStable<StableHashingContext<'gcx>>,
{
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
BoolSimplifiedType |
CharSimplifiedType |
StrSimplifiedType |
ArraySimplifiedType |
PtrSimplifiedType |
NeverSimplifiedType |
ParameterSimplifiedType => {
// nothing to do
}
IntSimplifiedType(t) => t.hash_stable(hcx, hasher),
UintSimplifiedType(t) => t.hash_stable(hcx, hasher),
FloatSimplifiedType(t) => t.hash_stable(hcx, hasher),
AdtSimplifiedType(d) => d.hash_stable(hcx, hasher),
TupleSimplifiedType(n) => n.hash_stable(hcx, hasher),
TraitSimplifiedType(d) => d.hash_stable(hcx, hasher),
ClosureSimplifiedType(d) => d.hash_stable(hcx, hasher),
GeneratorSimplifiedType(d) => d.hash_stable(hcx, hasher),
AnonSimplifiedType(d) => d.hash_stable(hcx, hasher),
FunctionSimplifiedType(n) => n.hash_stable(hcx, hasher),
}
}
}
|
@ -23,8 +23,13 @@ use std::cmp;
|
||||
use std::fmt;
|
||||
use std::i64;
|
||||
use std::iter;
|
||||
use std::mem;
|
||||
use std::ops::Deref;
|
||||
|
||||
use ich::StableHashingContext;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
|
||||
/// Parsed [Data layout](http://llvm.org/docs/LangRef.html#data-layout)
|
||||
/// for a target, which contains everything needed to compute layouts.
|
||||
pub struct TargetDataLayout {
|
||||
@ -2300,3 +2305,128 @@ impl<'a, 'tcx> TyLayout<'tcx> {
|
||||
cx.layout_of(cx.normalize_projections(self.field_type(cx, i)))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for Layout
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use ty::layout::Layout::*;
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
Scalar { value, non_zero } => {
|
||||
value.hash_stable(hcx, hasher);
|
||||
non_zero.hash_stable(hcx, hasher);
|
||||
}
|
||||
Vector { element, count } => {
|
||||
element.hash_stable(hcx, hasher);
|
||||
count.hash_stable(hcx, hasher);
|
||||
}
|
||||
Array { sized, align, primitive_align, element_size, count } => {
|
||||
sized.hash_stable(hcx, hasher);
|
||||
align.hash_stable(hcx, hasher);
|
||||
primitive_align.hash_stable(hcx, hasher);
|
||||
element_size.hash_stable(hcx, hasher);
|
||||
count.hash_stable(hcx, hasher);
|
||||
}
|
||||
FatPointer { ref metadata, non_zero } => {
|
||||
metadata.hash_stable(hcx, hasher);
|
||||
non_zero.hash_stable(hcx, hasher);
|
||||
}
|
||||
CEnum { discr, signed, non_zero, min, max } => {
|
||||
discr.hash_stable(hcx, hasher);
|
||||
signed.hash_stable(hcx, hasher);
|
||||
non_zero.hash_stable(hcx, hasher);
|
||||
min.hash_stable(hcx, hasher);
|
||||
max.hash_stable(hcx, hasher);
|
||||
}
|
||||
Univariant { ref variant, non_zero } => {
|
||||
variant.hash_stable(hcx, hasher);
|
||||
non_zero.hash_stable(hcx, hasher);
|
||||
}
|
||||
UntaggedUnion { ref variants } => {
|
||||
variants.hash_stable(hcx, hasher);
|
||||
}
|
||||
General { discr, ref variants, size, align, primitive_align } => {
|
||||
discr.hash_stable(hcx, hasher);
|
||||
variants.hash_stable(hcx, hasher);
|
||||
size.hash_stable(hcx, hasher);
|
||||
align.hash_stable(hcx, hasher);
|
||||
primitive_align.hash_stable(hcx, hasher);
|
||||
}
|
||||
RawNullablePointer { nndiscr, ref value } => {
|
||||
nndiscr.hash_stable(hcx, hasher);
|
||||
value.hash_stable(hcx, hasher);
|
||||
}
|
||||
StructWrappedNullablePointer {
|
||||
nndiscr,
|
||||
ref nonnull,
|
||||
ref discrfield,
|
||||
ref discrfield_source
|
||||
} => {
|
||||
nndiscr.hash_stable(hcx, hasher);
|
||||
nonnull.hash_stable(hcx, hasher);
|
||||
discrfield.hash_stable(hcx, hasher);
|
||||
discrfield_source.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(enum ::ty::layout::Integer {
|
||||
I1,
|
||||
I8,
|
||||
I16,
|
||||
I32,
|
||||
I64,
|
||||
I128
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(enum ::ty::layout::Primitive {
|
||||
Int(integer),
|
||||
F32,
|
||||
F64,
|
||||
Pointer
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ::ty::layout::Align {
|
||||
abi,
|
||||
pref
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ::ty::layout::Size {
|
||||
raw
|
||||
});
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for LayoutError<'gcx>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
use ty::layout::LayoutError::*;
|
||||
mem::discriminant(self).hash_stable(hcx, hasher);
|
||||
|
||||
match *self {
|
||||
Unknown(t) |
|
||||
SizeOverflow(t) => t.hash_stable(hcx, hasher)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct ::ty::layout::Struct {
|
||||
align,
|
||||
primitive_align,
|
||||
packed,
|
||||
sized,
|
||||
offsets,
|
||||
memory_index,
|
||||
min_size
|
||||
});
|
||||
|
||||
impl_stable_hash_for!(struct ::ty::layout::Union {
|
||||
align,
|
||||
primitive_align,
|
||||
min_size,
|
||||
packed
|
||||
});
|
||||
|
@ -16,9 +16,11 @@ use hir::{self, TraitCandidate, ItemLocalId};
|
||||
use hir::svh::Svh;
|
||||
use lint;
|
||||
use middle::const_val;
|
||||
use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary};
|
||||
use middle::cstore::{NativeLibraryKind, DepKind, CrateSource};
|
||||
use middle::cstore::{ExternCrate, LinkagePreference, NativeLibrary,
|
||||
ExternBodyNestedBodies};
|
||||
use middle::cstore::{NativeLibraryKind, DepKind, CrateSource, ExternConstBody};
|
||||
use middle::privacy::AccessLevels;
|
||||
use middle::reachable::ReachableSet;
|
||||
use middle::region;
|
||||
use middle::resolve_lifetime::{Region, ObjectLifetimeDefault};
|
||||
use middle::stability::{self, DeprecationEntry};
|
||||
@ -36,7 +38,7 @@ use ty::item_path;
|
||||
use ty::steal::Steal;
|
||||
use ty::subst::Substs;
|
||||
use ty::fast_reject::SimplifiedType;
|
||||
use util::nodemap::{DefIdSet, NodeSet, DefIdMap};
|
||||
use util::nodemap::{DefIdSet, DefIdMap};
|
||||
use util::common::{profq_msg, ProfileQueriesMsg};
|
||||
|
||||
use rustc_data_structures::indexed_set::IdxSetBuf;
|
||||
@ -44,11 +46,11 @@ use rustc_back::PanicStrategy;
|
||||
use rustc_data_structures::indexed_vec::IndexVec;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
use std::cell::{RefCell, RefMut, Cell};
|
||||
|
||||
use std::fmt::Debug;
|
||||
use std::hash::Hash;
|
||||
use std::marker::PhantomData;
|
||||
use std::mem;
|
||||
use std::collections::BTreeMap;
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
@ -925,9 +927,8 @@ macro_rules! define_maps {
|
||||
span = key.default_span(tcx)
|
||||
}
|
||||
|
||||
let dep_node = Self::to_dep_node(tcx, &key);
|
||||
let res = tcx.cycle_check(span, Query::$name(key), || {
|
||||
let dep_node = Self::to_dep_node(tcx, &key);
|
||||
|
||||
tcx.sess.diagnostic().track_diagnostics(|| {
|
||||
if dep_node.kind.is_anon() {
|
||||
tcx.dep_graph.with_anon_task(dep_node.kind, || {
|
||||
@ -1300,7 +1301,7 @@ define_maps! { <'tcx>
|
||||
/// Performs the privacy check and computes "access levels".
|
||||
[] fn privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
|
||||
|
||||
[] fn reachable_set: reachability_dep_node(CrateNum) -> Rc<NodeSet>,
|
||||
[] fn reachable_set: reachability_dep_node(CrateNum) -> ReachableSet,
|
||||
|
||||
/// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body;
|
||||
/// in the case of closures, this will be redirected to the enclosing function.
|
||||
@ -1320,8 +1321,7 @@ define_maps! { <'tcx>
|
||||
[] fn impl_parent: ImplParent(DefId) -> Option<DefId>,
|
||||
[] fn trait_of_item: TraitOfItem(DefId) -> Option<DefId>,
|
||||
[] fn is_exported_symbol: IsExportedSymbol(DefId) -> bool,
|
||||
[] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId)
|
||||
-> Rc<BTreeMap<hir::BodyId, hir::Body>>,
|
||||
[] fn item_body_nested_bodies: ItemBodyNestedBodies(DefId) -> ExternBodyNestedBodies,
|
||||
[] fn const_is_rvalue_promotable_to_static: ConstIsRvaluePromotableToStatic(DefId) -> bool,
|
||||
[] fn is_mir_available: IsMirAvailable(DefId) -> bool,
|
||||
|
||||
@ -1399,9 +1399,9 @@ define_maps! { <'tcx>
|
||||
[] fn extern_mod_stmt_cnum: ExternModStmtCnum(DefId) -> Option<CrateNum>,
|
||||
|
||||
[] fn get_lang_items: get_lang_items_node(CrateNum) -> Rc<LanguageItems>,
|
||||
[] fn defined_lang_items: DefinedLangItems(CrateNum) -> Rc<Vec<(DefIndex, usize)>>,
|
||||
[] fn defined_lang_items: DefinedLangItems(CrateNum) -> Rc<Vec<(DefId, usize)>>,
|
||||
[] fn missing_lang_items: MissingLangItems(CrateNum) -> Rc<Vec<LangItem>>,
|
||||
[] fn extern_const_body: ExternConstBody(DefId) -> &'tcx hir::Body,
|
||||
[] fn extern_const_body: ExternConstBody(DefId) -> ExternConstBody<'tcx>,
|
||||
[] fn visible_parent_map: visible_parent_map_node(CrateNum)
|
||||
-> Rc<DefIdMap<DefId>>,
|
||||
[] fn missing_extern_crate_item: MissingExternCrateItem(CrateNum) -> bool,
|
||||
@ -1417,7 +1417,7 @@ define_maps! { <'tcx>
|
||||
[] fn all_crate_nums: all_crate_nums_node(CrateNum) -> Rc<Vec<CrateNum>>,
|
||||
|
||||
[] fn exported_symbols: ExportedSymbols(CrateNum)
|
||||
-> Arc<Vec<(String, DefId, SymbolExportLevel)>>,
|
||||
-> Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,
|
||||
[] fn collect_and_partition_translation_items:
|
||||
collect_and_partition_translation_items_node(CrateNum)
|
||||
-> (Arc<DefIdSet>, Arc<Vec<Arc<CodegenUnit<'tcx>>>>),
|
||||
|
@ -500,9 +500,9 @@ impl<'tcx> TyS<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for ty::TyS<'gcx> {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for ty::TyS<'gcx> {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::TyS {
|
||||
ref sty,
|
||||
@ -1334,9 +1334,9 @@ impl<'tcx> serialize::UseSpecializedEncodable for &'tcx AdtDef {
|
||||
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx AdtDef {}
|
||||
|
||||
|
||||
impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for AdtDef {
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for AdtDef {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let ty::AdtDef {
|
||||
did,
|
||||
@ -2575,6 +2575,10 @@ pub struct SymbolName {
|
||||
pub name: InternedString
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct self::SymbolName {
|
||||
name
|
||||
});
|
||||
|
||||
impl Deref for SymbolName {
|
||||
type Target = str;
|
||||
|
||||
|
@ -11,13 +11,15 @@
|
||||
use hir;
|
||||
use hir::def_id::DefId;
|
||||
use hir::map::DefPathHash;
|
||||
use ich::{self, StableHashingContext};
|
||||
use traits::specialization_graph;
|
||||
use ty::fast_reject;
|
||||
use ty::fold::TypeFoldable;
|
||||
use ty::{Ty, TyCtxt};
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher,
|
||||
StableHasherResult};
|
||||
use std::rc::Rc;
|
||||
|
||||
/// A trait's definition with type information.
|
||||
@ -183,3 +185,16 @@ pub(super) fn trait_impls_of_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
non_blanket_impls: non_blanket_impls,
|
||||
})
|
||||
}
|
||||
|
||||
impl<'gcx> HashStable<StableHashingContext<'gcx>> for TraitImpls {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut StableHashingContext<'gcx>,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let TraitImpls {
|
||||
ref blanket_impls,
|
||||
ref non_blanket_impls,
|
||||
} = *self;
|
||||
|
||||
ich::hash_stable_trait_impls(hcx, hasher, blanket_impls, non_blanket_impls);
|
||||
}
|
||||
}
|
||||
|
@ -12,7 +12,7 @@
|
||||
|
||||
use hir::def_id::{DefId, LOCAL_CRATE};
|
||||
use hir::map::DefPathData;
|
||||
use ich::{StableHashingContext, NodeIdHashingMode};
|
||||
use ich::NodeIdHashingMode;
|
||||
use middle::const_val::ConstVal;
|
||||
use traits::{self, Reveal};
|
||||
use ty::{self, Ty, TyCtxt, TypeFoldable};
|
||||
@ -214,7 +214,7 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
/// context it's calculated within. This is used by the `type_id` intrinsic.
|
||||
pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
|
||||
let mut hasher = StableHasher::new();
|
||||
let mut hcx = StableHashingContext::new(self);
|
||||
let mut hcx = self.create_stable_hashing_context();
|
||||
|
||||
// We want the type_id be independent of the types free regions, so we
|
||||
// erase them. The erase_regions() call will also anonymize bound
|
||||
|
@ -34,7 +34,6 @@ use syntax_pos::Span;
|
||||
use std::cmp::Ordering;
|
||||
|
||||
use rustc_const_math::*;
|
||||
|
||||
macro_rules! signal {
|
||||
($e:expr, $exn:expr) => {
|
||||
return Err(ConstEvalErr { span: $e.span, kind: $exn })
|
||||
@ -366,7 +365,7 @@ fn eval_const_expr_partial<'a, 'tcx>(cx: &ConstContext<'a, 'tcx>,
|
||||
}
|
||||
} else {
|
||||
if tcx.is_const_fn(def_id) {
|
||||
tcx.extern_const_body(def_id)
|
||||
tcx.extern_const_body(def_id).body
|
||||
} else {
|
||||
signal!(e, TypeckError)
|
||||
}
|
||||
@ -790,7 +789,7 @@ fn const_eval<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
tcx.mir_const_qualif(def_id);
|
||||
tcx.hir.body(tcx.hir.body_owned_by(id))
|
||||
} else {
|
||||
tcx.extern_const_body(def_id)
|
||||
tcx.extern_const_body(def_id).body
|
||||
};
|
||||
ConstContext::new(tcx, key.param_env.and(substs), tables).eval(&body.value)
|
||||
}
|
||||
|
@ -607,7 +607,7 @@ impl<'a, 'tcx> PatternContext<'a, 'tcx> {
|
||||
let body = if let Some(id) = self.tcx.hir.as_local_node_id(def_id) {
|
||||
self.tcx.hir.body(self.tcx.hir.body_owned_by(id))
|
||||
} else {
|
||||
self.tcx.extern_const_body(def_id)
|
||||
self.tcx.extern_const_body(def_id).body
|
||||
};
|
||||
let pat = self.lower_const_expr(&body.value, pat_id, span);
|
||||
self.tables = old_tables;
|
||||
|
@ -8,7 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::hash::{Hash, Hasher, BuildHasher};
|
||||
use std::marker::PhantomData;
|
||||
use std::mem;
|
||||
use blake2b::Blake2bHasher;
|
||||
@ -192,6 +192,28 @@ impl<W> Hasher for StableHasher<W> {
|
||||
}
|
||||
|
||||
|
||||
/// Something that can provide a stable hashing context.
|
||||
pub trait StableHashingContextProvider {
|
||||
type ContextType;
|
||||
fn create_stable_hashing_context(&self) -> Self::ContextType;
|
||||
}
|
||||
|
||||
impl<'a, T: StableHashingContextProvider> StableHashingContextProvider for &'a T {
|
||||
type ContextType = T::ContextType;
|
||||
|
||||
fn create_stable_hashing_context(&self) -> Self::ContextType {
|
||||
(**self).create_stable_hashing_context()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: StableHashingContextProvider> StableHashingContextProvider for &'a mut T {
|
||||
type ContextType = T::ContextType;
|
||||
|
||||
fn create_stable_hashing_context(&self) -> Self::ContextType {
|
||||
(**self).create_stable_hashing_context()
|
||||
}
|
||||
}
|
||||
|
||||
/// Something that implements `HashStable<CTX>` can be hashed in a way that is
|
||||
/// stable across multiple compilation sessions.
|
||||
pub trait HashStable<CTX> {
|
||||
@ -200,6 +222,14 @@ pub trait HashStable<CTX> {
|
||||
hasher: &mut StableHasher<W>);
|
||||
}
|
||||
|
||||
/// Implement this for types that can be turned into stable keys like, for
|
||||
/// example, for DefId that can be converted to a DefPathHash. This is used for
|
||||
/// bringing maps into a predictable order before hashing them.
|
||||
pub trait ToStableHashKey<HCX> {
|
||||
type KeyType: Ord + Clone + Sized + HashStable<HCX>;
|
||||
fn to_stable_hash_key(&self, hcx: &HCX) -> Self::KeyType;
|
||||
}
|
||||
|
||||
// Implement HashStable by just calling `Hash::hash()`. This works fine for
|
||||
// self-contained values that don't depend on the hashing context `CTX`.
|
||||
macro_rules! impl_stable_hash_via_hash {
|
||||
@ -259,7 +289,8 @@ impl<T1: HashStable<CTX>, CTX> HashStable<CTX> for (T1,) {
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.0.hash_stable(ctx, hasher);
|
||||
let (ref _0,) = *self;
|
||||
_0.hash_stable(ctx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
@ -267,8 +298,24 @@ impl<T1: HashStable<CTX>, T2: HashStable<CTX>, CTX> HashStable<CTX> for (T1, T2)
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.0.hash_stable(ctx, hasher);
|
||||
self.1.hash_stable(ctx, hasher);
|
||||
let (ref _0, ref _1) = *self;
|
||||
_0.hash_stable(ctx, hasher);
|
||||
_1.hash_stable(ctx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<T1, T2, T3, CTX> HashStable<CTX> for (T1, T2, T3)
|
||||
where T1: HashStable<CTX>,
|
||||
T2: HashStable<CTX>,
|
||||
T3: HashStable<CTX>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let (ref _0, ref _1, ref _2) = *self;
|
||||
_0.hash_stable(ctx, hasher);
|
||||
_1.hash_stable(ctx, hasher);
|
||||
_2.hash_stable(ctx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
@ -292,7 +339,7 @@ impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Vec<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Box<T> {
|
||||
impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for Box<T> {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
@ -301,7 +348,7 @@ impl<T: HashStable<CTX>, CTX> HashStable<CTX> for Box<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: HashStable<CTX>, CTX> HashStable<CTX> for ::std::rc::Rc<T> {
|
||||
impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for ::std::rc::Rc<T> {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
@ -310,7 +357,7 @@ impl<T: HashStable<CTX>, CTX> HashStable<CTX> for ::std::rc::Rc<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: HashStable<CTX>, CTX> HashStable<CTX> for ::std::sync::Arc<T> {
|
||||
impl<T: ?Sized + HashStable<CTX>, CTX> HashStable<CTX> for ::std::sync::Arc<T> {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
@ -339,6 +386,14 @@ impl<CTX> HashStable<CTX> for String {
|
||||
}
|
||||
}
|
||||
|
||||
impl<HCX> ToStableHashKey<HCX> for String {
|
||||
type KeyType = String;
|
||||
#[inline]
|
||||
fn to_stable_hash_key(&self, _: &HCX) -> Self::KeyType {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<CTX> HashStable<CTX> for bool {
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
@ -365,8 +420,24 @@ impl<T, CTX> HashStable<CTX> for Option<T>
|
||||
}
|
||||
}
|
||||
|
||||
impl<T1, T2, CTX> HashStable<CTX> for Result<T1, T2>
|
||||
where T1: HashStable<CTX>,
|
||||
T2: HashStable<CTX>,
|
||||
{
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
mem::discriminant(self).hash_stable(ctx, hasher);
|
||||
match *self {
|
||||
Ok(ref x) => x.hash_stable(ctx, hasher),
|
||||
Err(ref x) => x.hash_stable(ctx, hasher),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T, CTX> HashStable<CTX> for &'a T
|
||||
where T: HashStable<CTX>
|
||||
where T: HashStable<CTX> + ?Sized
|
||||
{
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
@ -385,34 +456,6 @@ impl<T, CTX> HashStable<CTX> for ::std::mem::Discriminant<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V, CTX> HashStable<CTX> for ::std::collections::BTreeMap<K, V>
|
||||
where K: Ord + HashStable<CTX>,
|
||||
V: HashStable<CTX>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.len().hash_stable(ctx, hasher);
|
||||
for (k, v) in self {
|
||||
k.hash_stable(ctx, hasher);
|
||||
v.hash_stable(ctx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, CTX> HashStable<CTX> for ::std::collections::BTreeSet<T>
|
||||
where T: Ord + HashStable<CTX>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.len().hash_stable(ctx, hasher);
|
||||
for v in self {
|
||||
v.hash_stable(ctx, hasher);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<I: ::indexed_vec::Idx, T, CTX> HashStable<CTX> for ::indexed_vec::IndexVec<I, T>
|
||||
where T: HashStable<CTX>,
|
||||
{
|
||||
@ -425,3 +468,93 @@ impl<I: ::indexed_vec::Idx, T, CTX> HashStable<CTX> for ::indexed_vec::IndexVec<
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<I: ::indexed_vec::Idx, CTX> HashStable<CTX> for ::indexed_set::IdxSetBuf<I>
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
ctx: &mut CTX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
self.words().hash_stable(ctx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl_stable_hash_via_hash!(::std::path::Path);
|
||||
impl_stable_hash_via_hash!(::std::path::PathBuf);
|
||||
|
||||
impl<K, V, R, HCX> HashStable<HCX> for ::std::collections::HashMap<K, V, R>
|
||||
where K: ToStableHashKey<HCX> + Eq + Hash,
|
||||
V: HashStable<HCX>,
|
||||
R: BuildHasher,
|
||||
{
|
||||
#[inline]
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
hash_stable_hashmap(hcx, hasher, self, ToStableHashKey::to_stable_hash_key);
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, R, HCX> HashStable<HCX> for ::std::collections::HashSet<K, R>
|
||||
where K: ToStableHashKey<HCX> + Eq + Hash,
|
||||
R: BuildHasher,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let mut keys: Vec<_> = self.iter()
|
||||
.map(|k| k.to_stable_hash_key(hcx))
|
||||
.collect();
|
||||
keys.sort_unstable();
|
||||
keys.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, V, HCX> HashStable<HCX> for ::std::collections::BTreeMap<K, V>
|
||||
where K: ToStableHashKey<HCX>,
|
||||
V: HashStable<HCX>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let mut entries: Vec<_> = self.iter()
|
||||
.map(|(k, v)| (k.to_stable_hash_key(hcx), v))
|
||||
.collect();
|
||||
entries.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
|
||||
entries.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
impl<K, HCX> HashStable<HCX> for ::std::collections::BTreeSet<K>
|
||||
where K: ToStableHashKey<HCX>,
|
||||
{
|
||||
fn hash_stable<W: StableHasherResult>(&self,
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>) {
|
||||
let mut keys: Vec<_> = self.iter()
|
||||
.map(|k| k.to_stable_hash_key(hcx))
|
||||
.collect();
|
||||
keys.sort_unstable();
|
||||
keys.hash_stable(hcx, hasher);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn hash_stable_hashmap<HCX, K, V, R, SK, F, W>(
|
||||
hcx: &mut HCX,
|
||||
hasher: &mut StableHasher<W>,
|
||||
map: &::std::collections::HashMap<K, V, R>,
|
||||
to_stable_hash_key: F)
|
||||
where K: Eq + Hash,
|
||||
V: HashStable<HCX>,
|
||||
R: BuildHasher,
|
||||
SK: HashStable<HCX> + Ord + Clone,
|
||||
F: Fn(&K, &HCX) -> SK,
|
||||
W: StableHasherResult,
|
||||
{
|
||||
let mut entries: Vec<_> = map.iter()
|
||||
.map(|(k, v)| (to_stable_hash_key(k, hcx), v))
|
||||
.collect();
|
||||
entries.sort_unstable_by(|&(ref sk1, _), &(ref sk2, _)| sk1.cmp(sk2));
|
||||
entries.hash_stable(hcx, hasher);
|
||||
}
|
||||
|
||||
|
@ -175,7 +175,7 @@ pub fn compile_input(sess: &Session,
|
||||
// Construct the HIR map
|
||||
let hir_map = time(sess.time_passes(),
|
||||
"indexing hir",
|
||||
|| hir_map::map_crate(&mut hir_forest, defs));
|
||||
|| hir_map::map_crate(&mut hir_forest, &defs));
|
||||
|
||||
{
|
||||
let _ignore = hir_map.dep_graph.in_ignore();
|
||||
|
@ -133,7 +133,7 @@ fn test_env<F>(source_string: &str,
|
||||
|
||||
let arena = DroplessArena::new();
|
||||
let arenas = ty::GlobalArenas::new();
|
||||
let hir_map = hir_map::map_crate(&mut hir_forest, defs);
|
||||
let hir_map = hir_map::map_crate(&mut hir_forest, &defs);
|
||||
|
||||
// run just enough stuff to build a tcx:
|
||||
let named_region_map = resolve_lifetime::krate(&sess, &*cstore, &hir_map);
|
||||
|
@ -31,7 +31,7 @@ use std::cell::RefCell;
|
||||
use std::hash::Hash;
|
||||
use rustc::dep_graph::{DepNode, DepKind};
|
||||
use rustc::hir;
|
||||
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
|
||||
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
|
||||
use rustc::hir::map::DefPathHash;
|
||||
use rustc::hir::itemlikevisit::ItemLikeVisitor;
|
||||
use rustc::ich::{Fingerprint, StableHashingContext};
|
||||
@ -90,24 +90,27 @@ impl<'a> ::std::ops::Index<&'a DepNode> for IncrementalHashesMap {
|
||||
}
|
||||
|
||||
struct ComputeItemHashesVisitor<'a, 'tcx: 'a> {
|
||||
hcx: StableHashingContext<'a, 'tcx, 'tcx>,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
hcx: StableHashingContext<'tcx>,
|
||||
hashes: IncrementalHashesMap,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
fn compute_and_store_ich_for_item_like<T>(&mut self,
|
||||
dep_node: DepNode,
|
||||
def_index: DefIndex,
|
||||
hash_bodies: bool,
|
||||
item_like: T)
|
||||
where T: HashStable<StableHashingContext<'a, 'tcx, 'tcx>>
|
||||
where T: HashStable<StableHashingContext<'tcx>>
|
||||
{
|
||||
if !hash_bodies && !self.hcx.tcx().sess.opts.build_dep_graph() {
|
||||
if !hash_bodies && !self.tcx.sess.opts.build_dep_graph() {
|
||||
// If we just need the hashes in order to compute the SVH, we don't
|
||||
// need have two hashes per item. Just the one containing also the
|
||||
// item's body is sufficient.
|
||||
return
|
||||
}
|
||||
|
||||
let def_path_hash = self.hcx.local_def_path_hash(def_index);
|
||||
|
||||
let mut hasher = IchHasher::new();
|
||||
self.hcx.while_hashing_hir_bodies(hash_bodies, |hcx| {
|
||||
item_like.hash_stable(hcx, &mut hasher);
|
||||
@ -115,23 +118,33 @@ impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
|
||||
let bytes_hashed = hasher.bytes_hashed();
|
||||
let item_hash = hasher.finish();
|
||||
let dep_node = if hash_bodies {
|
||||
def_path_hash.to_dep_node(DepKind::HirBody)
|
||||
} else {
|
||||
def_path_hash.to_dep_node(DepKind::Hir)
|
||||
};
|
||||
debug!("calculate_def_hash: dep_node={:?} hash={:?}", dep_node, item_hash);
|
||||
self.hashes.insert(dep_node, item_hash);
|
||||
|
||||
let tcx = self.hcx.tcx();
|
||||
let bytes_hashed =
|
||||
tcx.sess.perf_stats.incr_comp_bytes_hashed.get() +
|
||||
bytes_hashed;
|
||||
tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
|
||||
self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() + bytes_hashed;
|
||||
self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed);
|
||||
|
||||
if hash_bodies {
|
||||
let in_scope_traits_map = self.tcx.in_scope_traits_map(def_index);
|
||||
let mut hasher = IchHasher::new();
|
||||
in_scope_traits_map.hash_stable(&mut self.hcx, &mut hasher);
|
||||
let dep_node = def_path_hash.to_dep_node(DepKind::InScopeTraits);
|
||||
self.hashes.insert(dep_node, hasher.finish());
|
||||
}
|
||||
}
|
||||
|
||||
fn compute_crate_hash(&mut self) {
|
||||
let tcx = self.hcx.tcx();
|
||||
let krate = tcx.hir.krate();
|
||||
let krate = self.tcx.hir.krate();
|
||||
|
||||
let mut crate_state = IchHasher::new();
|
||||
|
||||
let crate_disambiguator = tcx.sess.local_crate_disambiguator();
|
||||
let crate_disambiguator = self.tcx.sess.local_crate_disambiguator();
|
||||
"crate_disambiguator".hash(&mut crate_state);
|
||||
crate_disambiguator.as_str().len().hash(&mut crate_state);
|
||||
crate_disambiguator.as_str().hash(&mut crate_state);
|
||||
@ -145,6 +158,7 @@ impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
// This `match` determines what kinds of nodes
|
||||
// go into the SVH:
|
||||
match item_dep_node.kind {
|
||||
DepKind::InScopeTraits |
|
||||
DepKind::Hir |
|
||||
DepKind::HirBody => {
|
||||
// We want to incoporate these into the
|
||||
@ -195,18 +209,17 @@ impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
body_ids: _,
|
||||
} = *krate;
|
||||
|
||||
let def_path_hash = self.hcx.tcx().hir.definitions().def_path_hash(CRATE_DEF_INDEX);
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
|
||||
self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
|
||||
false,
|
||||
(module, (span, attrs)));
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
|
||||
self.compute_and_store_ich_for_item_like(CRATE_DEF_INDEX,
|
||||
true,
|
||||
(module, (span, attrs)));
|
||||
}
|
||||
|
||||
fn compute_and_store_ich_for_trait_impls(&mut self, krate: &'tcx hir::Crate)
|
||||
{
|
||||
let tcx = self.hcx.tcx();
|
||||
let tcx = self.tcx;
|
||||
|
||||
let mut impls: Vec<(DefPathHash, Fingerprint)> = krate
|
||||
.trait_impls
|
||||
@ -251,34 +264,31 @@ impl<'a, 'tcx: 'a> ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
|
||||
impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for ComputeItemHashesVisitor<'a, 'tcx> {
|
||||
fn visit_item(&mut self, item: &'tcx hir::Item) {
|
||||
let def_id = self.hcx.tcx().hir.local_def_id(item.id);
|
||||
let def_path_hash = self.hcx.tcx().def_path_hash(def_id);
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
|
||||
let def_index = self.tcx.hir.local_def_id(item.id).index;
|
||||
self.compute_and_store_ich_for_item_like(def_index,
|
||||
false,
|
||||
item);
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
|
||||
self.compute_and_store_ich_for_item_like(def_index,
|
||||
true,
|
||||
item);
|
||||
}
|
||||
|
||||
fn visit_trait_item(&mut self, item: &'tcx hir::TraitItem) {
|
||||
let def_id = self.hcx.tcx().hir.local_def_id(item.id);
|
||||
let def_path_hash = self.hcx.tcx().def_path_hash(def_id);
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
|
||||
let def_index = self.tcx.hir.local_def_id(item.id).index;
|
||||
self.compute_and_store_ich_for_item_like(def_index,
|
||||
false,
|
||||
item);
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
|
||||
self.compute_and_store_ich_for_item_like(def_index,
|
||||
true,
|
||||
item);
|
||||
}
|
||||
|
||||
fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) {
|
||||
let def_id = self.hcx.tcx().hir.local_def_id(item.id);
|
||||
let def_path_hash = self.hcx.tcx().def_path_hash(def_id);
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
|
||||
let def_index = self.tcx.hir.local_def_id(item.id).index;
|
||||
self.compute_and_store_ich_for_item_like(def_index,
|
||||
false,
|
||||
item);
|
||||
self.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
|
||||
self.compute_and_store_ich_for_item_like(def_index,
|
||||
true,
|
||||
item);
|
||||
}
|
||||
@ -292,7 +302,8 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
let krate = tcx.hir.krate();
|
||||
|
||||
let mut visitor = ComputeItemHashesVisitor {
|
||||
hcx: StableHashingContext::new(tcx),
|
||||
tcx,
|
||||
hcx: tcx.create_stable_hashing_context(),
|
||||
hashes: IncrementalHashesMap::new(),
|
||||
};
|
||||
|
||||
@ -301,12 +312,11 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
|
||||
krate.visit_all_item_likes(&mut visitor);
|
||||
|
||||
for macro_def in krate.exported_macros.iter() {
|
||||
let def_id = tcx.hir.local_def_id(macro_def.id);
|
||||
let def_path_hash = tcx.def_path_hash(def_id);
|
||||
visitor.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::Hir),
|
||||
let def_index = tcx.hir.local_def_id(macro_def.id).index;
|
||||
visitor.compute_and_store_ich_for_item_like(def_index,
|
||||
false,
|
||||
macro_def);
|
||||
visitor.compute_and_store_ich_for_item_like(def_path_hash.to_dep_node(DepKind::HirBody),
|
||||
visitor.compute_and_store_ich_for_item_like(def_index,
|
||||
true,
|
||||
macro_def);
|
||||
}
|
||||
|
@ -49,6 +49,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
|
||||
match dep_node.kind {
|
||||
DepKind::Krate |
|
||||
DepKind::Hir |
|
||||
DepKind::InScopeTraits |
|
||||
DepKind::HirBody =>
|
||||
true,
|
||||
DepKind::MetaData => {
|
||||
@ -66,6 +67,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
|
||||
}
|
||||
|
||||
// HIR nodes (which always come from our crate) are an input:
|
||||
DepKind::InScopeTraits |
|
||||
DepKind::Hir |
|
||||
DepKind::HirBody => {
|
||||
Some(self.incremental_hashes_map[dep_node])
|
||||
|
@ -104,6 +104,7 @@ fn does_still_exist(tcx: TyCtxt, dep_node: &DepNode) -> bool {
|
||||
match dep_node.kind {
|
||||
DepKind::Hir |
|
||||
DepKind::HirBody |
|
||||
DepKind::InScopeTraits |
|
||||
DepKind::MetaData => {
|
||||
dep_node.extract_def_id(tcx).is_some()
|
||||
}
|
||||
|
@ -16,26 +16,46 @@ use schema::*;
use rustc::hir;
use rustc::ty::{self, TyCtxt};

use rustc::ich::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};

#[derive(RustcEncodable, RustcDecodable)]
pub struct Ast<'tcx> {
pub body: Lazy<hir::Body>,
pub tables: Lazy<ty::TypeckTables<'tcx>>,
pub nested_bodies: LazySeq<hir::Body>,
pub rvalue_promotable_to_static: bool,
pub stable_bodies_hash: Fingerprint,
}

impl_stable_hash_for!(struct Ast<'tcx> {
body,
tables,
nested_bodies,
rvalue_promotable_to_static
rvalue_promotable_to_static,
stable_bodies_hash
});

impl<'a, 'b, 'tcx> IsolatedEncoder<'a, 'b, 'tcx> {
pub fn encode_body(&mut self, body_id: hir::BodyId) -> Lazy<Ast<'tcx>> {
let body = self.tcx.hir.body(body_id);
let lazy_body = self.lazy(body);

// In order to avoid having to hash hir::Bodies from extern crates, we
// hash them here, during export, and store the hash with metadata.
let stable_bodies_hash = {
let mut hcx = self.tcx.create_stable_hashing_context();
let mut hasher = StableHasher::new();

hcx.while_hashing_hir_bodies(true, |hcx| {
hcx.while_hashing_spans(false, |hcx| {
body.hash_stable(hcx, &mut hasher);
});
});

hasher.finish()
};

let lazy_body = self.lazy(body);
let tables = self.tcx.body_tables(body_id);
let lazy_tables = self.lazy(tables);

@ -54,6 +74,7 @@ impl<'a, 'b, 'tcx> IsolatedEncoder<'a, 'b, 'tcx> {
tables: lazy_tables,
nested_bodies: lazy_nested_bodies,
rvalue_promotable_to_static,
stable_bodies_hash,
})
}
}
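The comment in the hunk above spells out the approach: extern bodies are hashed once at export time and the resulting fingerprint travels with the crate metadata, so downstream crates can compare hashes instead of re-hashing foreign HIR. A minimal sketch of that "hash at export, compare at import" idea, using std's hasher in place of the compiler's StableHasher and Fingerprint types (all names here are illustrative, not rustc APIs):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-in for computing a fingerprint of an exported body.
// (DefaultHasher is not stable across processes; rustc uses a dedicated
// stable hasher for exactly that reason.)
fn fingerprint<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // Exporting crate: hash the body once and store the result in metadata.
    let stored = fingerprint(&"fn body tokens");
    // Importing crate: reuse the stored hash instead of re-hashing the body.
    assert_eq!(stored, fingerprint(&"fn body tokens"));
}
```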
@ -301,7 +301,10 @@ impl<'a> CrateLoader<'a> {
.decode(&cmeta)
.filter(|lib| relevant_lib(self.sess, lib) &&
lib.kind == cstore::NativeLibraryKind::NativeUnknown)
.flat_map(|lib| lib.foreign_items.into_iter())
.flat_map(|lib| {
assert!(lib.foreign_items.iter().all(|def_id| def_id.krate == cnum));
lib.foreign_items.into_iter().map(|def_id| def_id.index)
})
.collect();

cmeta.dllimport_foreign_items = dllimports;

@ -159,7 +159,7 @@ provide! { <'tcx> tcx, def_id, other, cdata,
is_exported_symbol => {
cdata.exported_symbols.contains(&def_id.index)
}
item_body_nested_bodies => { Rc::new(cdata.item_body_nested_bodies(def_id.index)) }
item_body_nested_bodies => { cdata.item_body_nested_bodies(def_id.index) }
const_is_rvalue_promotable_to_static => {
cdata.const_is_rvalue_promotable_to_static(def_id.index)
}

@ -264,7 +264,7 @@ pub fn provide_local<'tcx>(providers: &mut Providers<'tcx>) {
tcx.native_libraries(id.krate)
.iter()
.filter(|lib| native_libs::relevant_lib(&tcx.sess, lib))
.find(|l| l.foreign_items.contains(&id.index))
.find(|l| l.foreign_items.contains(&id))
.map(|l| l.kind)
},
native_libraries: |tcx, cnum| {
@ -17,9 +17,11 @@ use rustc::hir::map::{DefKey, DefPath, DefPathData, DefPathHash};
use rustc::hir;

use rustc::middle::const_val::ByteArray;
use rustc::middle::cstore::LinkagePreference;
use rustc::middle::cstore::{LinkagePreference, ExternConstBody,
ExternBodyNestedBodies};
use rustc::hir::def::{self, Def, CtorKind};
use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc::ich::Fingerprint;
use rustc::middle::lang_items;
use rustc::session::Session;
use rustc::ty::{self, Ty, TyCtxt};

@ -659,10 +661,11 @@ impl<'a, 'tcx> CrateMetadata {
}

/// Iterates over the language items in the given crate.
pub fn get_lang_items(&self) -> Vec<(DefIndex, usize)> {
pub fn get_lang_items(&self) -> Vec<(DefId, usize)> {
self.root
.lang_items
.decode(self)
.map(|(def_index, index)| (self.local_def_id(def_index), index))
.collect()
}

@ -774,12 +777,16 @@ impl<'a, 'tcx> CrateMetadata {
pub fn extern_const_body(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
id: DefIndex)
-> &'tcx hir::Body {
-> ExternConstBody<'tcx> {
assert!(!self.is_proc_macro(id));
let ast = self.entry(id).ast.unwrap();
let def_id = self.local_def_id(id);
let body = ast.decode((self, tcx)).body.decode((self, tcx));
tcx.hir.intern_inlined_body(def_id, body)
let ast = ast.decode((self, tcx));
let body = ast.body.decode((self, tcx));
ExternConstBody {
body: tcx.hir.intern_inlined_body(def_id, body),
fingerprint: ast.stable_bodies_hash,
}
}

pub fn item_body_tables(&self,

@ -790,10 +797,23 @@ impl<'a, 'tcx> CrateMetadata {
tcx.alloc_tables(ast.tables.decode((self, tcx)))
}

pub fn item_body_nested_bodies(&self, id: DefIndex) -> BTreeMap<hir::BodyId, hir::Body> {
self.entry(id).ast.into_iter().flat_map(|ast| {
ast.decode(self).nested_bodies.decode(self).map(|body| (body.id(), body))
}).collect()
pub fn item_body_nested_bodies(&self, id: DefIndex) -> ExternBodyNestedBodies {
if let Some(ref ast) = self.entry(id).ast {
let ast = ast.decode(self);
let nested_bodies: BTreeMap<_, _> = ast.nested_bodies
.decode(self)
.map(|body| (body.id(), body))
.collect();
ExternBodyNestedBodies {
nested_bodies: Rc::new(nested_bodies),
fingerprint: ast.stable_bodies_hash,
}
} else {
ExternBodyNestedBodies {
nested_bodies: Rc::new(BTreeMap::new()),
fingerprint: Fingerprint::zero(),
}
}
}

pub fn const_is_rvalue_promotable_to_static(&self, id: DefIndex) -> bool {
@ -23,7 +23,7 @@ use rustc_serialize::Encodable;
pub struct IsolatedEncoder<'a, 'b: 'a, 'tcx: 'b> {
pub tcx: TyCtxt<'b, 'tcx, 'tcx>,
ecx: &'a mut EncodeContext<'b, 'tcx>,
hcx: Option<(StableHashingContext<'b, 'tcx, 'tcx>, StableHasher<Fingerprint>)>,
hcx: Option<(StableHashingContext<'tcx>, StableHasher<Fingerprint>)>,
}

impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {

@ -40,9 +40,9 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
// Except when -Zquery-dep-graph is specified because we don't
// want to mess up our tests.
let hcx = if tcx.sess.opts.debugging_opts.query_dep_graph {
StableHashingContext::new(tcx)
tcx.create_stable_hashing_context()
} else {
StableHashingContext::new(tcx).force_span_hashing()
tcx.create_stable_hashing_context().force_span_hashing()
};

Some((hcx, StableHasher::new()))

@ -61,7 +61,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
}

pub fn lazy<T>(&mut self, value: &T) -> Lazy<T>
where T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
where T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
value.hash_stable(hcx, hasher);

@ -72,7 +72,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {

pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = T>,
T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
let iter = iter.into_iter();

@ -111,7 +111,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {

pub fn lazy_seq_ref<'x, I, T>(&mut self, iter: I) -> LazySeq<T>
where I: IntoIterator<Item = &'x T>,
T: 'x + Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
T: 'x + Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
let iter = iter.into_iter();

@ -149,7 +149,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
}

pub fn lazy_seq_from_slice<T>(&mut self, slice: &[T]) -> LazySeq<T>
where T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
where T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
slice.hash_stable(hcx, hasher);

@ -159,7 +159,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
}

pub fn lazy_seq_ref_from_slice<T>(&mut self, slice: &[&T]) -> LazySeq<T>
where T: Encodable + HashStable<StableHashingContext<'b, 'tcx, 'tcx>>
where T: Encodable + HashStable<StableHashingContext<'tcx>>
{
if let Some((ref mut hcx, ref mut hasher)) = self.hcx {
slice.hash_stable(hcx, hasher);

@ -96,7 +96,7 @@ impl<'a, 'tcx> ItemLikeVisitor<'tcx> for Collector<'a, 'tcx> {
list[0].meta_item().unwrap().clone()
});
let foreign_items = fm.items.iter()
.map(|it| self.tcx.hir.local_def_id(it.id).index)
.map(|it| self.tcx.hir.local_def_id(it.id))
.collect();
let lib = NativeLibrary {
name: n,

@ -229,9 +229,9 @@ pub struct TraitImpls {
pub impls: LazySeq<DefIndex>,
}

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for TraitImpls {
impl<'gcx> HashStable<StableHashingContext<'gcx>> for TraitImpls {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
let TraitImpls {
trait_id: (krate, def_index),

@ -312,9 +312,9 @@ pub enum EntryKind<'tcx> {
AssociatedConst(AssociatedContainer, u8),
}

impl<'a, 'gcx, 'tcx> HashStable<StableHashingContext<'a, 'gcx, 'tcx>> for EntryKind<'tcx> {
impl<'gcx> HashStable<StableHashingContext<'gcx>> for EntryKind<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a, 'gcx, 'tcx>,
hcx: &mut StableHashingContext<'gcx>,
hasher: &mut StableHasher<W>) {
mem::discriminant(self).hash_stable(hcx, hasher);
match *self {
@ -14,7 +14,7 @@ use std::sync::Arc;
use base;
use monomorphize::Instance;
use rustc::hir::def_id::CrateNum;
use rustc::hir::def_id::{DefId, LOCAL_CRATE, INVALID_CRATE, CRATE_DEF_INDEX};
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::middle::exported_symbols::SymbolExportLevel;
use rustc::session::config;
use rustc::ty::TyCtxt;

@ -24,7 +24,7 @@ use rustc_allocator::ALLOCATOR_METHODS;

pub type ExportedSymbols = FxHashMap<
CrateNum,
Arc<Vec<(String, DefId, SymbolExportLevel)>>,
Arc<Vec<(String, Option<DefId>, SymbolExportLevel)>>,
>;

pub fn threshold(tcx: TyCtxt) -> SymbolExportLevel {

@ -65,11 +65,13 @@ pub fn provide_local(providers: &mut Providers) {
Rc::new(tcx.exported_symbols(cnum)
.iter()
.filter_map(|&(_, id, level)| {
if level.is_below_threshold(export_threshold) {
Some(id)
} else {
None
}
id.and_then(|id| {
if level.is_below_threshold(export_threshold) {
Some(id)
} else {
None
}
})
})
.collect())
};

@ -95,25 +97,20 @@ pub fn provide_local(providers: &mut Providers) {
let name = tcx.symbol_name(Instance::mono(tcx, def_id));
let export_level = export_level(tcx, def_id);
debug!("EXPORTED SYMBOL (local): {} ({:?})", name, export_level);
(str::to_owned(&name), def_id, export_level)
(str::to_owned(&name), Some(def_id), export_level)
})
.collect();

const INVALID_DEF_ID: DefId = DefId {
krate: INVALID_CRATE,
index: CRATE_DEF_INDEX,
};

if let Some(_) = *tcx.sess.entry_fn.borrow() {
local_crate.push(("main".to_string(),
INVALID_DEF_ID,
None,
SymbolExportLevel::C));
}

if tcx.sess.allocator_kind.get().is_some() {
for method in ALLOCATOR_METHODS {
local_crate.push((format!("__rust_{}", method.name),
INVALID_DEF_ID,
None,
SymbolExportLevel::Rust));
}
}

@ -123,12 +120,12 @@ pub fn provide_local(providers: &mut Providers) {
let idx = def_id.index;
let disambiguator = tcx.sess.local_crate_disambiguator();
let registrar = tcx.sess.generate_derive_registrar_symbol(disambiguator, idx);
local_crate.push((registrar, def_id, SymbolExportLevel::C));
local_crate.push((registrar, Some(def_id), SymbolExportLevel::C));
}

if tcx.sess.crate_types.borrow().contains(&config::CrateTypeDylib) {
local_crate.push((metadata_symbol_name(tcx),
INVALID_DEF_ID,
None,
SymbolExportLevel::Rust));
}
Arc::new(local_crate)

@ -178,7 +175,7 @@ pub fn provide_extern(providers: &mut Providers) {
export_level(tcx, def_id)
};
debug!("EXPORTED SYMBOL (re-export): {} ({:?})", name, export_level);
(str::to_owned(&name), def_id, export_level)
(str::to_owned(&name), Some(def_id), export_level)
})
.collect();
@ -41,7 +41,6 @@ use rustc::middle::trans::{Linkage, Visibility, Stats};
use rustc::middle::cstore::{EncodedMetadata, EncodedMetadataHashes};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::maps::Providers;
use rustc::dep_graph::AssertDepGraphSafe;
use rustc::middle::cstore::{self, LinkMeta, LinkagePreference};
use rustc::hir::map as hir_map;
use rustc::util::common::{time, print_time_passes_entry};

@ -894,7 +893,7 @@ fn iter_globals(llmod: llvm::ModuleRef) -> ValueIter {
/// This list is later used by linkers to determine the set of symbols needed to
/// be exposed from a dynamic library and it's also encoded into the metadata.
pub fn find_exported_symbols(tcx: TyCtxt) -> NodeSet {
tcx.reachable_set(LOCAL_CRATE).iter().cloned().filter(|&id| {
tcx.reachable_set(LOCAL_CRATE).0.iter().cloned().filter(|&id| {
// Next, we want to ignore some FFI functions that are not exposed from
// this crate. Reachable FFI functions can be lumped into two
// categories:

@ -1070,7 +1069,6 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let start_time = Instant::now();
all_stats.extend(tcx.compile_codegen_unit(*cgu.name()));
total_trans_time += start_time.elapsed();

ongoing_translation.check_for_errors(tcx.sess);
}

@ -1371,8 +1369,8 @@ fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let dep_node = cgu.work_product_dep_node();
let ((stats, module), _) =
tcx.dep_graph.with_task(dep_node,
AssertDepGraphSafe(tcx),
AssertDepGraphSafe(cgu),
tcx,
cgu,
module_translation);
let time_to_translate = start_time.elapsed();

@ -1393,14 +1391,10 @@ fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
return stats;

fn module_translation<'a, 'tcx>(
tcx: AssertDepGraphSafe<TyCtxt<'a, 'tcx, 'tcx>>,
args: AssertDepGraphSafe<Arc<CodegenUnit<'tcx>>>)
tcx: TyCtxt<'a, 'tcx, 'tcx>,
cgu: Arc<CodegenUnit<'tcx>>)
-> (Stats, ModuleTranslation)
{
// FIXME(#40304): We ought to be using the id as a key and some queries, I think.
let AssertDepGraphSafe(tcx) = tcx;
let AssertDepGraphSafe(cgu) = args;

let cgu_name = cgu.name().to_string();
let cgu_id = cgu.work_product_id();
let symbol_name_hash = cgu.compute_symbol_name_hash(tcx);

@ -1567,3 +1561,25 @@ pub fn visibility_to_llvm(linkage: Visibility) -> llvm::Visibility {
Visibility::Protected => llvm::Visibility::Protected,
}
}

// FIXME(mw): Anything that is produced via DepGraph::with_task() must implement
// the HashStable trait. Normally DepGraph::with_task() calls are
// hidden behind queries, but CGU creation is a special case in two
// ways: (1) it's not a query and (2) CGU are output nodes, so their
// Fingerprints are not actually needed. It remains to be clarified
// how exactly this case will be handled in the red/green system but
// for now we content ourselves with providing a no-op HashStable
// implementation for CGUs.
mod temp_stable_hash_impls {
use rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher,
HashStable};
use ModuleTranslation;

impl<HCX> HashStable<HCX> for ModuleTranslation {
fn hash_stable<W: StableHasherResult>(&self,
_: &mut HCX,
_: &mut StableHasher<W>) {
// do nothing
}
}
}
@ -14,6 +14,7 @@ use llvm::{ContextRef, ModuleRef, ValueRef};
use rustc::dep_graph::{DepGraph, DepGraphSafe};
use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::ich::StableHashingContext;
use rustc::traits;
use debuginfo;
use callee;

@ -25,8 +26,9 @@ use partitioning::CodegenUnit;
use type_::Type;
use rustc_data_structures::base_n;
use rustc::middle::trans::Stats;
use rustc::session::Session;
use rustc_data_structures::stable_hasher::StableHashingContextProvider;
use rustc::session::config::{self, NoDebugInfo};
use rustc::session::Session;
use rustc::ty::layout::{LayoutCx, LayoutError, LayoutTyper, TyLayout};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::util::nodemap::FxHashMap;

@ -134,6 +136,17 @@ impl<'a, 'tcx> CrateContext<'a, 'tcx> {
impl<'a, 'tcx> DepGraphSafe for CrateContext<'a, 'tcx> {
}

impl<'a, 'tcx> DepGraphSafe for SharedCrateContext<'a, 'tcx> {
}

impl<'a, 'tcx> StableHashingContextProvider for SharedCrateContext<'a, 'tcx> {
type ContextType = StableHashingContext<'tcx>;

fn create_stable_hashing_context(&self) -> Self::ContextType {
self.tcx.create_stable_hashing_context()
}
}

pub fn get_reloc_model(sess: &Session) -> llvm::RelocMode {
let reloc_model_arg = match sess.opts.cg.relocation_model {
Some(ref s) => &s[..],
@ -96,6 +96,10 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
-> ty::Region<'tcx>
{
let tcx = self.tcx();
let lifetime_name = |def_id| {
tcx.hir.name(tcx.hir.as_local_node_id(def_id).unwrap())
};

let hir_id = tcx.hir.node_to_hir_id(lifetime.id);
let r = match tcx.named_region(hir_id) {
Some(rl::Region::Static) => {

@ -103,9 +107,9 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
}

Some(rl::Region::LateBound(debruijn, id)) => {
let name = tcx.hir.name(id);
let name = lifetime_name(id);
tcx.mk_region(ty::ReLateBound(debruijn,
ty::BrNamed(tcx.hir.local_def_id(id), name)))
ty::BrNamed(id, name)))
}

Some(rl::Region::LateBoundAnon(debruijn, index)) => {

@ -113,19 +117,19 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
}

Some(rl::Region::EarlyBound(index, id)) => {
let name = tcx.hir.name(id);
let name = lifetime_name(id);
tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: tcx.hir.local_def_id(id),
def_id: id,
index,
name,
}))
}

Some(rl::Region::Free(scope, id)) => {
let name = tcx.hir.name(id);
let name = lifetime_name(id);
tcx.mk_region(ty::ReFree(ty::FreeRegion {
scope,
bound_region: ty::BrNamed(tcx.hir.local_def_id(id), name)
bound_region: ty::BrNamed(id, name)
}))

// (*) -- not late-bound, won't change
@ -14,7 +14,8 @@
//! We walk the set of items and, for each member, generate new constraints.

use hir::def_id::DefId;
use rustc::dep_graph::{AssertDepGraphSafe, DepKind};
use rustc::dep_graph::{DepGraphSafe, DepKind};
use rustc::ich::StableHashingContext;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use syntax::ast;

@ -22,6 +23,7 @@ use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor;

use rustc_data_structures::transitive_relation::TransitiveRelation;
use rustc_data_structures::stable_hasher::StableHashingContextProvider;

use super::terms::*;
use super::terms::VarianceTerm::*;

@ -138,6 +140,16 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> {
}
}

impl<'a, 'tcx> StableHashingContextProvider for ConstraintContext<'a, 'tcx> {
type ContextType = StableHashingContext<'tcx>;

fn create_stable_hashing_context(&self) -> Self::ContextType {
self.terms_cx.tcx.create_stable_hashing_context()
}
}

impl<'a, 'tcx> DepGraphSafe for ConstraintContext<'a, 'tcx> {}

impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
fn visit_node_helper(&mut self, id: ast::NodeId) {
let tcx = self.terms_cx.tcx;

@ -151,14 +163,14 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
// on dep-graph management.
let dep_node = def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
tcx.dep_graph.with_task(dep_node,
AssertDepGraphSafe(self),
self,
def_id,
visit_item_task);

fn visit_item_task<'a, 'tcx>(ccx: AssertDepGraphSafe<&mut ConstraintContext<'a, 'tcx>>,
fn visit_item_task<'a, 'tcx>(ccx: &mut ConstraintContext<'a, 'tcx>,
def_id: DefId)
{
ccx.0.build_constraints_for_item(def_id);
ccx.build_constraints_for_item(def_id);
}
}
@ -474,9 +474,9 @@ impl hir::print::PpAnn for InlinedConst {
}

pub fn print_inlined_const(cx: &DocContext, did: DefId) -> String {
let body = cx.tcx.extern_const_body(did);
let body = cx.tcx.extern_const_body(did).body;
let inlined = InlinedConst {
nested_bodies: cx.tcx.item_body_nested_bodies(did)
nested_bodies: cx.tcx.item_body_nested_bodies(did).nested_bodies
};
hir::print::to_string(&inlined, |s| s.print_expr(&body.value))
}

@ -1835,7 +1835,8 @@ impl Clean<Type> for hir::Ty {
for (i, lt_param) in generics.lifetimes.iter().enumerate() {
if let Some(lt) = provided_params.lifetimes.get(i).cloned() {
if !lt.is_elided() {
lt_substs.insert(lt_param.lifetime.id, lt.clean(cx));
let lt_def_id = cx.tcx.hir.local_def_id(lt_param.lifetime.id);
lt_substs.insert(lt_def_id, lt.clean(cx));
}
}
}

@ -24,7 +24,7 @@ use rustc_trans::back::link;
use rustc_resolve as resolve;
use rustc_metadata::cstore::CStore;

use syntax::{ast, codemap};
use syntax::codemap;
use syntax::feature_gate::UnstableFeatures;
use syntax::fold::Folder;
use errors;

@ -65,7 +65,7 @@ pub struct DocContext<'a, 'tcx: 'a> {
/// Table type parameter definition -> substituted type
pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>,
/// Table node id of lifetime parameter definition -> substituted lifetime
pub lt_substs: RefCell<FxHashMap<ast::NodeId, clean::Lifetime>>,
pub lt_substs: RefCell<FxHashMap<DefId, clean::Lifetime>>,
}

impl<'a, 'tcx> DocContext<'a, 'tcx> {

@ -77,7 +77,7 @@ impl<'a, 'tcx> DocContext<'a, 'tcx> {
/// the substitutions for a type alias' RHS.
pub fn enter_alias<F, R>(&self,
ty_substs: FxHashMap<Def, clean::Type>,
lt_substs: FxHashMap<ast::NodeId, clean::Lifetime>,
lt_substs: FxHashMap<DefId, clean::Lifetime>,
f: F) -> R
where F: FnOnce() -> R {
let (old_tys, old_lts) =

@ -175,7 +175,7 @@ pub fn run_core(search_paths: SearchPaths,

let arena = DroplessArena::new();
let arenas = GlobalArenas::new();
let hir_map = hir_map::map_crate(&mut hir_forest, defs);
let hir_map = hir_map::map_crate(&mut hir_forest, &defs);
let output_filenames = driver::build_output_filenames(&input,
&None,
&None,
@ -2671,8 +2671,9 @@ fn item_struct(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
let ns_id = derive_id(format!("{}.{}",
field.name.as_ref().unwrap(),
ItemType::StructField.name_space()));
write!(w, "<span id='{id}' class=\"{item_type}\">
<span id='{ns_id}' class='invisible'>
write!(w, "<span id=\"{id}\" class=\"{item_type} small-section-header\">
<a href=\"#{id}\" class=\"anchor field\"></a>
<span id=\"{ns_id}\" class='invisible'>
<code>{name}: {ty}</code>
</span></span>",
item_type = ItemType::StructField,

@ -2793,7 +2794,8 @@ fn item_enum(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
let ns_id = derive_id(format!("{}.{}",
variant.name.as_ref().unwrap(),
ItemType::Variant.name_space()));
write!(w, "<span id='{id}' class='variant'>\
write!(w, "<span id=\"{id}\" class=\"variant small-section-header\">\
<a href=\"#{id}\" class=\"anchor field\"></a>\
<span id='{ns_id}' class='invisible'><code>{name}",
id = id,
ns_id = ns_id,

@ -288,6 +288,7 @@ nav.sub {

.docblock {
margin-left: 24px;
position: relative;
}

.content .out-of-band {

@ -456,8 +457,13 @@ a {
}
.anchor {
display: none;
position: absolute;
left: -25px;
}
.anchor:after {
.anchor.field {
left: -20px;
}
.anchor:before {
content: '\2002\00a7\2002';
}

@ -625,7 +631,9 @@ a.test-arrow:hover{
text-decoration: none;
}

.section-header:hover a:after {
.section-header:hover a:before {
position: absolute;
left: -25px;
content: '\2002\00a7\2002';
}

@ -817,6 +825,7 @@ span.since {
position: absolute;
left: -1px;
margin-top: 7px;
z-index: 1;
}

.tooltip {

@ -853,3 +862,7 @@ span.since {
border-style: solid;
border-color: transparent black transparent transparent;
}

pre.rust {
position: relative;
}
@ -124,7 +124,7 @@ pub fn run(input: &str,
render_type);

{
let map = hir::map::map_crate(&mut hir_forest, defs);
let map = hir::map::map_crate(&mut hir_forest, &defs);
let krate = map.krate();
let mut hir_collector = HirCollector {
sess: &sess,
@ -53,6 +53,7 @@

use alloc::allocator;
use any::TypeId;
use borrow::Cow;
use cell;
use char;
use fmt::{self, Debug, Display};

@ -217,6 +218,20 @@ impl<'a> From<&'a str> for Box<Error> {
}
}

#[stable(feature = "cow_box_error", since = "1.22.0")]
impl<'a, 'b> From<Cow<'b, str>> for Box<Error + Send + Sync + 'a> {
fn from(err: Cow<'b, str>) -> Box<Error + Send + Sync + 'a> {
From::from(String::from(err))
}
}

#[stable(feature = "cow_box_error", since = "1.22.0")]
impl<'a> From<Cow<'a, str>> for Box<Error> {
fn from(err: Cow<'a, str>) -> Box<Error> {
From::from(String::from(err))
}
}

#[unstable(feature = "never_type_impls", issue = "35121")]
impl Error for ! {
fn description(&self) -> &str { *self }
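For reference, a small usage sketch of the `From<Cow<str>>` conversions added above (assuming Rust 1.22 or later, where `cow_box_error` is stable); as the impls show, the conversion simply routes through `String::from`:

```rust
use std::borrow::Cow;
use std::error::Error;

fn main() {
    // Either a borrowed or an owned message converts into a boxed error;
    // both impls forward through String::from internally.
    let borrowed: Cow<'static, str> = Cow::Borrowed("disk full");
    let owned: Cow<'static, str> = Cow::Owned(String::from("out of memory"));

    let a: Box<dyn Error + Send + Sync> = borrowed.into();
    let b: Box<dyn Error> = owned.into();
    println!("{} / {}", a, b);
}
```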
@ -242,6 +242,7 @@
#![feature(allocator_internals)]
#![feature(allow_internal_unsafe)]
#![feature(allow_internal_unstable)]
#![feature(align_offset)]
#![feature(asm)]
#![feature(box_syntax)]
#![feature(cfg_target_has_atomic)]
@ -65,15 +65,12 @@ pub mod fallback {
let usize_bytes = mem::size_of::<usize>();

// search up to an aligned boundary
let align = (ptr as usize) & (usize_bytes - 1);
let mut offset;
if align > 0 {
offset = cmp::min(usize_bytes - align, len);
let mut offset = ptr.align_offset(usize_bytes);
if offset > 0 {
offset = cmp::min(offset, len);
if let Some(index) = text[..offset].iter().position(|elt| *elt == x) {
return Some(index);
}
} else {
offset = 0;
}

// search the body of the text
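The new version above leans on `align_offset` (gated behind `#![feature(align_offset)]` at the time of this change, per the libstd hunk earlier) instead of computing the alignment gap by hand. A rough illustration of what that call returns, written against today's stable raw-pointer API and not taken from libcore:

```rust
use std::cmp;
use std::mem;

// How many leading bytes of `text` sit before the first usize-aligned address,
// capped at the slice length -- the "head" that the fallback searches bytewise.
fn unaligned_prefix_len(text: &[u8]) -> usize {
    let usize_bytes = mem::size_of::<usize>();
    cmp::min(text.as_ptr().align_offset(usize_bytes), text.len())
}

fn main() {
    let buf = [0u8; 32];
    // Offsetting the slice by one byte typically yields a non-zero prefix length.
    println!("{}", unaligned_prefix_len(&buf[1..]));
}
```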
@ -287,6 +287,8 @@ impl Builder {
/// Names the thread-to-be. Currently the name is used for identification
/// only in panic messages.
///
/// The name must not contain null bytes (`\0`).
///
/// For more information about named threads, see
/// [this module-level documentation][naming-threads].
///

@ -355,6 +357,10 @@ impl Builder {
/// [`io::Result`]: ../../std/io/type.Result.html
/// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
///
/// # Panics
///
/// Panics if a thread name was set and it contained null bytes.
///
/// # Examples
///
/// ```

@ -941,6 +947,7 @@ pub struct Thread {

impl Thread {
// Used only internally to construct a thread object without spawning
// Panics if the name contains nuls.
pub(crate) fn new(name: Option<String>) -> Thread {
let cname = name.map(|n| {
CString::new(n).expect("thread name may not contain interior null bytes")
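The documentation added above states the constraint on thread names. A brief usage sketch of the standard `std::thread::Builder` API (nothing project-specific): the name set on the builder is what later appears in panic messages, and `spawn` panics if the name contains an interior NUL byte.

```rust
use std::thread;

fn main() {
    let handle = thread::Builder::new()
        .name("worker-1".to_string()) // must not contain interior NUL bytes
        .spawn(|| {
            println!("running on {:?}", thread::current().name());
        })
        .expect("failed to spawn thread");
    handle.join().unwrap();
}
```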
@ -1221,7 +1221,8 @@ pub enum ImplItemKind {
Macro(Mac),
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy,
PartialOrd, Ord)]
pub enum IntTy {
Is,
I8,

@ -1274,7 +1275,8 @@ impl IntTy {
}
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy,
PartialOrd, Ord)]
pub enum UintTy {
Us,
U8,

@ -1324,7 +1326,8 @@ impl fmt::Display for UintTy {
}
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy,
PartialOrd, Ord)]
pub enum FloatTy {
F32,
F64,
@ -12,6 +12,7 @@
// scope.

// revisions: rpass1 rpass2
// compile-flags: -Z query-dep-graph

#![feature(rustc_attrs)]

@ -47,13 +48,15 @@ mod mod3 {
use Trait2;

#[rustc_clean(label="Hir", cfg="rpass2")]
#[rustc_dirty(label="HirBody", cfg="rpass2")]
#[rustc_clean(label="HirBody", cfg="rpass2")]
#[rustc_dirty(label="TypeckTables", cfg="rpass2")]
fn bar() {
().method();
}

#[rustc_clean(label="Hir", cfg="rpass2")]
#[rustc_clean(label="HirBody", cfg="rpass2")]
#[rustc_clean(label="TypeckTables", cfg="rpass2")]
fn baz() {
22; // no method call, traits in scope don't matter
}