Get rid of redundant `HashSet`

This commit is contained in:
Oliver Schneider 2018-04-13 18:48:41 +02:00
parent 748e71e8f4
commit 7f7d4c376a
No known key found for this signature in database
GPG Key ID: 1D5CB4FC597C3004
3 changed files with 28 additions and 30 deletions

View File

@ -178,7 +178,7 @@ pub fn specialized_encode_alloc_id<
AllocKind::Fn.encode(encoder)?;
fn_instance.encode(encoder)?;
} else if let Some(did) = tcx.interpret_interner.get_static(alloc_id) {
// referring to statics doesn't need to know about their allocations, just hash the DefId
// referring to statics doesn't need to know about their allocations, just about their DefId
AllocKind::Static.encode(encoder)?;
did.encode(encoder)?;
} else {

View File

@ -201,7 +201,6 @@ impl<'sess> OnDiskCache<'sess> {
predicate_shorthands: FxHashMap(),
expn_info_shorthands: FxHashMap(),
interpret_allocs: FxHashMap(),
interpret_alloc_ids: FxHashSet(),
interpret_allocs_inverse: Vec::new(),
codemap: CachingCodemapView::new(tcx.sess.codemap()),
file_to_file_index,
@ -284,7 +283,12 @@ impl<'sess> OnDiskCache<'sess> {
let mut interpret_alloc_index = Vec::new();
let mut n = 0;
loop {
let new_n = encoder.interpret_alloc_ids.len();
let new_n = encoder.interpret_allocs_inverse.len();
// if we have found new ids, serialize those, too
if n == new_n {
// otherwise, abort
break;
}
for idx in n..new_n {
let id = encoder.interpret_allocs_inverse[idx];
let pos = AbsoluteBytePos::new(encoder.position());
@ -295,11 +299,6 @@ impl<'sess> OnDiskCache<'sess> {
id,
)?;
}
// if we have found new ids, serialize those, too
if n == new_n {
// otherwise, abort
break;
}
n = new_n;
}
interpret_alloc_index
@ -802,7 +801,6 @@ struct CacheEncoder<'enc, 'a, 'tcx, E>
expn_info_shorthands: FxHashMap<Mark, AbsoluteBytePos>,
interpret_allocs: FxHashMap<interpret::AllocId, usize>,
interpret_allocs_inverse: Vec<interpret::AllocId>,
interpret_alloc_ids: FxHashSet<interpret::AllocId>,
codemap: CachingCodemapView<'tcx>,
file_to_file_index: FxHashMap<*const FileMap, FileMapIndex>,
}
@ -839,14 +837,15 @@ impl<'enc, 'a, 'tcx, E> SpecializedEncoder<interpret::AllocId> for CacheEncoder<
where E: 'enc + ty_codec::TyEncoder
{
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
let index = if self.interpret_alloc_ids.insert(*alloc_id) {
let idx = self.interpret_alloc_ids.len() - 1;
assert_eq!(idx, self.interpret_allocs_inverse.len());
self.interpret_allocs_inverse.push(*alloc_id);
assert!(self.interpret_allocs.insert(*alloc_id, idx).is_none());
idx
} else {
self.interpret_allocs[alloc_id]
use std::collections::hash_map::Entry;
let index = match self.interpret_allocs.entry(*alloc_id) {
Entry::Occupied(e) => *e.get(),
Entry::Vacant(e) => {
let idx = self.interpret_allocs_inverse.len();
self.interpret_allocs_inverse.push(*alloc_id);
e.insert(idx);
idx
},
};
index.encode(self)

View File

@ -29,7 +29,7 @@ use rustc::ty::{self, Ty, TyCtxt, ReprOptions, SymbolName};
use rustc::ty::codec::{self as ty_codec, TyEncoder};
use rustc::session::config::{self, CrateTypeProcMacro};
use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
@ -62,7 +62,6 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
interpret_allocs: FxHashMap<interpret::AllocId, usize>,
interpret_allocs_inverse: Vec<interpret::AllocId>,
interpret_alloc_ids: FxHashSet<interpret::AllocId>,
// This is used to speed up Span encoding.
filemap_cache: Lrc<FileMap>,
@ -199,14 +198,15 @@ impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
let index = if self.interpret_alloc_ids.insert(*alloc_id) {
let idx = self.interpret_alloc_ids.len() - 1;
assert_eq!(idx, self.interpret_allocs_inverse.len());
self.interpret_allocs_inverse.push(*alloc_id);
assert!(self.interpret_allocs.insert(*alloc_id, idx).is_none());
idx
} else {
self.interpret_allocs[alloc_id]
use std::collections::hash_map::Entry;
let index = match self.interpret_allocs.entry(*alloc_id) {
Entry::Occupied(e) => *e.get(),
Entry::Vacant(e) => {
let idx = self.interpret_allocs_inverse.len();
self.interpret_allocs_inverse.push(*alloc_id);
e.insert(idx);
idx
},
};
index.encode(self)
@ -456,7 +456,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let mut n = 0;
trace!("beginning to encode alloc ids");
loop {
let new_n = self.interpret_alloc_ids.len();
let new_n = self.interpret_allocs_inverse.len();
// if we have found new ids, serialize those, too
if n == new_n {
// otherwise, abort
@ -487,7 +487,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro);
let has_default_lib_allocator =
attr::contains_name(tcx.hir.krate_attrs(), "default_lib_allocator");
let has_global_allocator = tcx.sess.has_global_allocator.get();
let has_global_allocator = *tcx.sess.has_global_allocator.get();
let root = self.lazy(&CrateRoot {
name: tcx.crate_name(LOCAL_CRATE),
@ -1792,7 +1792,6 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
filemap_cache: tcx.sess.codemap().files()[0].clone(),
interpret_allocs: Default::default(),
interpret_allocs_inverse: Default::default(),
interpret_alloc_ids: Default::default(),
};
// Encode the rustc version string in a predictable location.