Auto merge of #75278 - cuviper:indexmap, r=Mark-Simulacrum

Upgrade indexmap and use it more

First, this upgrades `indexmap` to 1.5.1, which is now based on `hashbrown::raw::RawTable`. This means it shares many of the performance characteristics of the standard `HashMap` (also built on `hashbrown`) for insertion, lookup, etc., while keeping items in insertion order.

Then across various rustc crates, this replaces a lot of `Vec`+`HashMap` pairs with a single `IndexMap` or `IndexSet`.
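
As a rough illustration of the pattern (this is not code from the PR — the interner type and names below are hypothetical), a `Vec` + `HashMap` pair that tracks both insertion order and element indexes collapses into a single `IndexSet`: `insert_full` returns the element's index whether or not it was already present, and `get_index_of`/`get_index` cover the lookups. The `FxIndexSet`/`FxIndexMap` types used throughout the diff below are the same `indexmap` types parameterized with rustc's `FxHasher`.

```rust
use indexmap::IndexSet;
use std::collections::HashMap;

// Before: a Vec for insertion order plus a HashMap for O(1) index lookup.
struct OldInterner {
    items: Vec<String>,
    index_of: HashMap<String, usize>,
}

impl OldInterner {
    fn intern(&mut self, item: String) -> usize {
        if let Some(&index) = self.index_of.get(&item) {
            return index;
        }
        let index = self.items.len();
        self.items.push(item.clone());
        self.index_of.insert(item, index);
        index
    }
}

// After: one IndexSet keeps insertion order and hashes lookups.
#[derive(Default)]
struct NewInterner {
    items: IndexSet<String>,
}

impl NewInterner {
    fn intern(&mut self, item: String) -> usize {
        // `insert_full` returns the element's index, whether or not it was new.
        let (index, _newly_inserted) = self.items.insert_full(item);
        index
    }

    fn index_of(&self, item: &str) -> Option<usize> {
        self.items.get_index_of(item)
    }

    fn get(&self, index: usize) -> Option<&String> {
        self.items.get_index(index)
    }
}

fn main() {
    let mut interner = NewInterner::default();
    assert_eq!(interner.intern("a".to_string()), 0);
    assert_eq!(interner.intern("b".to_string()), 1);
    assert_eq!(interner.intern("a".to_string()), 0); // duplicates keep their original index
    assert_eq!(interner.index_of("b"), Some(1));
}
```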

Closes #60608.
r? @eddyb
bors 2020-08-09 20:08:50 +00:00
commit 18f3be7704
21 changed files with 144 additions and 193 deletions


@@ -1382,9 +1382,13 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "1.0.2"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e81a7c05f79578dbc15793d8b619db9ba32b4577003ef3af1a91c416798c58d"
+checksum = "86b45e59b16c76b11bf9738fd5d38879d3bd28ad292d7b313608becb17ae2df9"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
 
 [[package]]
 name = "installer"


@@ -6,7 +6,7 @@ use llvm::coverageinfo::CounterMappingRegion;
 use log::debug;
 use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression, Region};
 use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods};
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexSet;
 use rustc_llvm::RustString;
 use std::ffi::CString;
@@ -76,13 +76,12 @@ pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
 }
 
 struct CoverageMapGenerator {
-    filenames: Vec<CString>,
-    filename_to_index: FxHashMap<CString, u32>,
+    filenames: FxIndexSet<CString>,
 }
 
 impl CoverageMapGenerator {
     fn new() -> Self {
-        Self { filenames: Vec::new(), filename_to_index: FxHashMap::default() }
+        Self { filenames: FxIndexSet::default() }
     }
 
     /// Using the `expressions` and `counter_regions` collected for the current function, generate
@@ -122,16 +121,8 @@ impl CoverageMapGenerator {
             let c_filename =
                 CString::new(file_name).expect("null error converting filename to C string");
             debug!(" file_id: {} = '{:?}'", current_file_id, c_filename);
-            let filenames_index = match self.filename_to_index.get(&c_filename) {
-                Some(index) => *index,
-                None => {
-                    let index = self.filenames.len() as u32;
-                    self.filenames.push(c_filename.clone());
-                    self.filename_to_index.insert(c_filename.clone(), index);
-                    index
-                }
-            };
-            virtual_file_mapping.push(filenames_index);
+            let (filenames_index, _) = self.filenames.insert_full(c_filename);
+            virtual_file_mapping.push(filenames_index as u32);
         }
         mapping_regions.push(CounterMappingRegion::code_region(
             counter,


@@ -97,8 +97,11 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
     }
 }
 
-pub(crate) fn write_filenames_section_to_buffer(filenames: &Vec<CString>, buffer: &RustString) {
-    let c_str_vec = filenames.iter().map(|cstring| cstring.as_ptr()).collect::<Vec<_>>();
+pub(crate) fn write_filenames_section_to_buffer<'a>(
+    filenames: impl IntoIterator<Item = &'a CString>,
+    buffer: &RustString,
+) {
+    let c_str_vec = filenames.into_iter().map(|cstring| cstring.as_ptr()).collect::<Vec<_>>();
     unsafe {
         llvm::LLVMRustCoverageWriteFilenamesSectionToBuffer(
             c_str_vec.as_ptr(),


@@ -11,7 +11,7 @@ doctest = false
 
 [dependencies]
 ena = "0.14"
-indexmap = "1"
+indexmap = "1.5.1"
 log = { package = "tracing", version = "0.1" }
 jobserver_crate = { version = "0.1.13", package = "jobserver" }
 lazy_static = "1"


@@ -1,4 +1,4 @@
-use crate::fx::FxHashMap;
+use crate::fx::FxIndexSet;
 use crate::stable_hasher::{HashStable, StableHasher};
 use crate::sync::Lock;
 use rustc_index::bit_set::BitMatrix;
@@ -13,10 +13,7 @@ mod tests;
 #[derive(Clone, Debug)]
 pub struct TransitiveRelation<T: Eq + Hash> {
     // List of elements. This is used to map from a T to a usize.
-    elements: Vec<T>,
-
-    // Maps each element to an index.
-    map: FxHashMap<T, Index>,
+    elements: FxIndexSet<T>,
 
     // List of base edges in the graph. Require to compute transitive
     // closure.
@@ -39,7 +36,6 @@ impl<T: Eq + Hash> Default for TransitiveRelation<T> {
     fn default() -> Self {
         TransitiveRelation {
             elements: Default::default(),
-            map: Default::default(),
             edges: Default::default(),
             closure: Default::default(),
         }
@@ -65,20 +61,16 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
     }
 
     fn index(&self, a: &T) -> Option<Index> {
-        self.map.get(a).cloned()
+        self.elements.get_index_of(a).map(Index)
     }
 
     fn add_index(&mut self, a: T) -> Index {
-        let &mut TransitiveRelation { ref mut elements, ref mut closure, ref mut map, .. } = self;
-
-        *map.entry(a.clone()).or_insert_with(|| {
-            elements.push(a);
-
-            // if we changed the dimensions, clear the cache
-            *closure.get_mut() = None;
-
-            Index(elements.len() - 1)
-        })
+        let (index, added) = self.elements.insert_full(a);
+        if added {
+            // if we changed the dimensions, clear the cache
+            *self.closure.get_mut() = None;
+        }
+        Index(index)
     }
 
     /// Applies the (partial) function to each edge and returns a new
@@ -430,14 +422,11 @@ where
 {
     fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> {
         d.read_struct("TransitiveRelation", 2, |d| {
-            let elements: Vec<T> = d.read_struct_field("elements", 0, |d| Decodable::decode(d))?;
-            let edges = d.read_struct_field("edges", 1, |d| Decodable::decode(d))?;
-            let map = elements
-                .iter()
-                .enumerate()
-                .map(|(index, elem)| (elem.clone(), Index(index)))
-                .collect();
-            Ok(TransitiveRelation { elements, edges, map, closure: Lock::new(None) })
+            Ok(TransitiveRelation {
+                elements: d.read_struct_field("elements", 0, |d| Decodable::decode(d))?,
+                edges: d.read_struct_field("edges", 1, |d| Decodable::decode(d))?,
+                closure: Lock::new(None),
+            })
         })
     }
 }
@@ -452,8 +441,6 @@ where
         let TransitiveRelation {
             ref elements,
            ref edges,
-            // "map" is just a copy of elements vec
-            map: _,
            // "closure" is just a copy of the data above
            closure: _,
        } = *self;


@@ -4,7 +4,7 @@ use crate::rmeta::*;
 use log::{debug, trace};
 use rustc_ast::ast;
 use rustc_data_structures::fingerprint::Fingerprint;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
 use rustc_data_structures::stable_hasher::StableHasher;
 use rustc_data_structures::sync::{join, Lrc};
 use rustc_hir as hir;
@@ -48,8 +48,7 @@ struct EncodeContext<'a, 'tcx> {
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
-    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
-    interpret_allocs_inverse: Vec<interpret::AllocId>,
+    interpret_allocs: FxIndexSet<interpret::AllocId>,
 
     // This is used to speed up Span encoding.
     // The `usize` is an index into the `MonotonicVec`
@@ -331,17 +330,7 @@ impl<'a, 'b, 'tcx> SpecializedEncoder<ty::Predicate<'b>> for EncodeContext<'a, '
 impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
-        use std::collections::hash_map::Entry;
-
-        let index = match self.interpret_allocs.entry(*alloc_id) {
-            Entry::Occupied(e) => *e.get(),
-            Entry::Vacant(e) => {
-                let idx = self.interpret_allocs_inverse.len();
-                self.interpret_allocs_inverse.push(*alloc_id);
-                e.insert(idx);
-                idx
-            }
-        };
+        let (index, _) = self.interpret_allocs.insert_full(*alloc_id);
 
         index.encode(self)
     }
 }
@@ -583,7 +572,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         let mut n = 0;
         trace!("beginning to encode alloc ids");
         loop {
-            let new_n = self.interpret_allocs_inverse.len();
+            let new_n = self.interpret_allocs.len();
             // if we have found new ids, serialize those, too
             if n == new_n {
                 // otherwise, abort
@@ -591,7 +580,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
             }
             trace!("encoding {} further alloc ids", new_n - n);
             for idx in n..new_n {
-                let id = self.interpret_allocs_inverse[idx];
+                let id = self.interpret_allocs[idx];
                 let pos = self.position() as u32;
                 interpret_alloc_index.push(pos);
                 interpret::specialized_encode_alloc_id(self, tcx, id).unwrap();
@@ -2019,7 +2008,6 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata {
         predicate_shorthands: Default::default(),
         source_file_cache: (source_map_files[0].clone(), 0),
         interpret_allocs: Default::default(),
-        interpret_allocs_inverse: Default::default(),
         required_source_files: Some(GrowableBitSet::with_capacity(source_map_files.len())),
         is_proc_macro: tcx.sess.crate_types().contains(&CrateType::ProcMacro),
         hygiene_ctxt: &hygiene_ctxt,


@@ -5,7 +5,7 @@ use crate::ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
 use crate::ty::context::TyCtxt;
 use crate::ty::{self, Ty};
 use rustc_data_structures::fingerprint::Fingerprint;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_data_structures::sync::{HashMapExt, Lock, Lrc, OnceCell};
 use rustc_data_structures::thin_vec::ThinVec;
 use rustc_errors::Diagnostic;
@@ -212,7 +212,6 @@ impl<'sess> OnDiskCache<'sess> {
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
             interpret_allocs: Default::default(),
-            interpret_allocs_inverse: Vec::new(),
             source_map: CachingSourceMapView::new(tcx.sess.source_map()),
             file_to_file_index,
             hygiene_context: &hygiene_encode_context,
@@ -267,7 +266,7 @@ impl<'sess> OnDiskCache<'sess> {
         let mut interpret_alloc_index = Vec::new();
         let mut n = 0;
         loop {
-            let new_n = encoder.interpret_allocs_inverse.len();
+            let new_n = encoder.interpret_allocs.len();
             // If we have found new IDs, serialize those too.
             if n == new_n {
                 // Otherwise, abort.
@@ -275,7 +274,7 @@ impl<'sess> OnDiskCache<'sess> {
             }
             interpret_alloc_index.reserve(new_n - n);
             for idx in n..new_n {
-                let id = encoder.interpret_allocs_inverse[idx];
+                let id = encoder.interpret_allocs[idx];
                 let pos = encoder.position() as u32;
                 interpret_alloc_index.push(pos);
                 interpret::specialized_encode_alloc_id(&mut encoder, tcx, id)?;
@@ -767,8 +766,7 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
     encoder: &'a mut E,
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
-    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
-    interpret_allocs_inverse: Vec<interpret::AllocId>,
+    interpret_allocs: FxIndexSet<interpret::AllocId>,
     source_map: CachingSourceMapView<'tcx>,
     file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>,
     hygiene_context: &'a HygieneEncodeContext,
@@ -807,17 +805,7 @@ where
     E: 'a + TyEncoder,
 {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
-        use std::collections::hash_map::Entry;
-
-        let index = match self.interpret_allocs.entry(*alloc_id) {
-            Entry::Occupied(e) => *e.get(),
-            Entry::Vacant(e) => {
-                let idx = self.interpret_allocs_inverse.len();
-                self.interpret_allocs_inverse.push(*alloc_id);
-                e.insert(idx);
-                idx
-            }
-        };
+        let (index, _) = self.interpret_allocs.insert_full(*alloc_id);
 
         index.encode(self)
     }
 }


@@ -3,9 +3,8 @@ use crate::borrow_check::path_utils::allow_two_phase_borrow;
 use crate::borrow_check::place_ext::PlaceExt;
 use crate::dataflow::indexes::BorrowIndex;
 use crate::dataflow::move_paths::MoveData;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
 use rustc_index::bit_set::BitSet;
-use rustc_index::vec::IndexVec;
 use rustc_middle::mir::traversal;
 use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::{self, Body, Local, Location};
@@ -15,14 +14,11 @@ use std::ops::Index;
 crate struct BorrowSet<'tcx> {
     /// The fundamental map relating bitvector indexes to the borrows
-    /// in the MIR.
-    crate borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,
-
-    /// Each borrow is also uniquely identified in the MIR by the
-    /// `Location` of the assignment statement in which it appears on
-    /// the right hand side; we map each such location to the
-    /// corresponding `BorrowIndex`.
-    crate location_map: FxHashMap<Location, BorrowIndex>,
+    /// in the MIR. Each borrow is also uniquely identified in the MIR
+    /// by the `Location` of the assignment statement in which it
+    /// appears on the right hand side. Thus the location is the map
+    /// key, and its position in the map corresponds to `BorrowIndex`.
+    crate location_map: FxIndexMap<Location, BorrowData<'tcx>>,
 
     /// Locations which activate borrows.
     /// NOTE: a given location may activate more than one borrow in the future
@@ -40,7 +36,7 @@ impl<'tcx> Index<BorrowIndex> for BorrowSet<'tcx> {
     type Output = BorrowData<'tcx>;
 
     fn index(&self, index: BorrowIndex) -> &BorrowData<'tcx> {
-        &self.borrows[index]
+        &self.location_map[index.as_usize()]
     }
 }
@@ -129,7 +125,6 @@ impl<'tcx> BorrowSet<'tcx> {
         let mut visitor = GatherBorrows {
             tcx,
             body: &body,
-            idx_vec: IndexVec::new(),
             location_map: Default::default(),
             activation_map: Default::default(),
             local_map: Default::default(),
@@ -146,7 +141,6 @@ impl<'tcx> BorrowSet<'tcx> {
         }
 
         BorrowSet {
-            borrows: visitor.idx_vec,
            location_map: visitor.location_map,
            activation_map: visitor.activation_map,
            local_map: visitor.local_map,
@@ -157,13 +151,32 @@ impl<'tcx> BorrowSet<'tcx> {
     crate fn activations_at_location(&self, location: Location) -> &[BorrowIndex] {
         self.activation_map.get(&location).map(|activations| &activations[..]).unwrap_or(&[])
     }
+
+    crate fn len(&self) -> usize {
+        self.location_map.len()
+    }
+
+    crate fn indices(&self) -> impl Iterator<Item = BorrowIndex> {
+        BorrowIndex::from_usize(0)..BorrowIndex::from_usize(self.len())
+    }
+
+    crate fn iter_enumerated(&self) -> impl Iterator<Item = (BorrowIndex, &BorrowData<'tcx>)> {
+        self.indices().zip(self.location_map.values())
+    }
+
+    crate fn get_index_of(&self, location: &Location) -> Option<BorrowIndex> {
+        self.location_map.get_index_of(location).map(BorrowIndex::from)
+    }
+
+    crate fn contains(&self, location: &Location) -> bool {
+        self.location_map.contains_key(location)
+    }
 }
 
 struct GatherBorrows<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     body: &'a Body<'tcx>,
-    idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
-    location_map: FxHashMap<Location, BorrowIndex>,
+    location_map: FxIndexMap<Location, BorrowData<'tcx>>,
     activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
     local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
@@ -203,8 +216,8 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
                 borrowed_place: *borrowed_place,
                 assigned_place: *assigned_place,
             };
-            let idx = self.idx_vec.push(borrow);
-            self.location_map.insert(location, idx);
+            let (idx, _) = self.location_map.insert_full(location, borrow);
+            let idx = BorrowIndex::from(idx);
 
             self.insert_as_pending_if_two_phase(location, assigned_place, kind, idx);
@@ -224,7 +237,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
         //
        // TMP = &mut place
        if let Some(&borrow_index) = self.pending_activations.get(temp) {
-            let borrow_data = &mut self.idx_vec[borrow_index];
+            let borrow_data = &mut self.location_map[borrow_index.as_usize()];
 
             // Watch out: the use of TMP in the borrow itself
             // doesn't count as an activation. =)
@@ -265,8 +278,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
         if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
             // double-check that we already registered a BorrowData for this
-            let borrow_index = self.location_map[&location];
-            let borrow_data = &self.idx_vec[borrow_index];
+            let borrow_data = &self.location_map[&location];
             assert_eq!(borrow_data.reserve_location, location);
             assert_eq!(borrow_data.kind, kind);
             assert_eq!(borrow_data.region, region.to_region_vid());
@@ -316,7 +328,7 @@ impl<'a, 'tcx> GatherBorrows<'a, 'tcx> {
         // Consider the borrow not activated to start. When we find an activation, we'll update
         // this field.
         {
-            let borrow_data = &mut self.idx_vec[borrow_index];
+            let borrow_data = &mut self.location_map[borrow_index.as_usize()];
             borrow_data.activation_location = TwoPhaseActivation::NotActivated;
         }
@@ -332,7 +344,7 @@ impl<'a, 'tcx> GatherBorrows<'a, 'tcx> {
                 at borrow_index: {:?} with associated data {:?}",
                temp,
                old_index,
-                self.idx_vec[old_index]
+                self.location_map[old_index.as_usize()]
            );
        }
    }


@@ -217,7 +217,7 @@ impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> {
             let places_conflict = places_conflict::places_conflict(
                 self.infcx.tcx,
                 self.body,
-                self.borrow_set.borrows[borrow_index].borrowed_place,
+                self.borrow_set[borrow_index].borrowed_place,
                 place,
                 places_conflict::PlaceConflictBias::NoOverlap,
             );


@@ -166,8 +166,8 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
                 // Invalidate all borrows of local places
                 let borrow_set = self.borrow_set.clone();
                 let resume = self.location_table.start_index(resume.start_location());
-                for i in borrow_set.borrows.indices() {
-                    if borrow_of_local_data(borrow_set.borrows[i].borrowed_place) {
+                for (i, data) in borrow_set.iter_enumerated() {
+                    if borrow_of_local_data(data.borrowed_place) {
                         self.all_facts.invalidates.push((resume, i));
                     }
                 }
@@ -178,8 +178,8 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
                 // Invalidate all borrows of local places
                 let borrow_set = self.borrow_set.clone();
                 let start = self.location_table.start_index(location);
-                for i in borrow_set.borrows.indices() {
-                    if borrow_of_local_data(borrow_set.borrows[i].borrowed_place) {
+                for (i, data) in borrow_set.iter_enumerated() {
+                    if borrow_of_local_data(data.borrowed_place) {
                         self.all_facts.invalidates.push((start, i));
                     }
                 }
@@ -369,7 +369,7 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
         let tcx = self.tcx;
         let body = self.body;
         let borrow_set = self.borrow_set.clone();
-        let indices = self.borrow_set.borrows.indices();
+        let indices = self.borrow_set.indices();
         each_borrow_involving_path(
             self,
             tcx,


@@ -1131,11 +1131,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             (
                 Reservation(WriteKind::MutableBorrow(bk)),
                 BorrowKind::Shallow | BorrowKind::Shared,
-            ) if {
-                tcx.migrate_borrowck() && this.borrow_set.location_map.contains_key(&location)
-            } =>
-            {
-                let bi = this.borrow_set.location_map[&location];
+            ) if { tcx.migrate_borrowck() && this.borrow_set.contains(&location) } => {
+                let bi = this.borrow_set.get_index_of(&location).unwrap();
                 debug!(
                     "recording invalid reservation of place: {:?} with \
                      borrow index {:?} as warning",


@@ -206,7 +206,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'tcx>(
     // the `borrow_set`, their `BorrowIndex` are synthesized as the universal region index
     // added to the existing number of loans, as if they succeeded them in the set.
     //
-    let borrow_count = borrow_set.borrows.len();
+    let borrow_count = borrow_set.len();
     debug!(
         "compute_regions: polonius placeholders, num_universals={}, borrow_count={}",
         universal_regions.len(),


@@ -1,4 +1,4 @@
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexSet;
 use rustc_index::bit_set::{HybridBitSet, SparseBitMatrix};
 use rustc_index::vec::Idx;
 use rustc_index::vec::IndexVec;
@@ -193,26 +193,25 @@ impl<N: Idx> LivenessValues<N> {
 /// NLL.
 #[derive(Default)]
 crate struct PlaceholderIndices {
-    to_index: FxHashMap<ty::PlaceholderRegion, PlaceholderIndex>,
-    from_index: IndexVec<PlaceholderIndex, ty::PlaceholderRegion>,
+    indices: FxIndexSet<ty::PlaceholderRegion>,
 }
 
 impl PlaceholderIndices {
     crate fn insert(&mut self, placeholder: ty::PlaceholderRegion) -> PlaceholderIndex {
-        let PlaceholderIndices { to_index, from_index } = self;
-        *to_index.entry(placeholder).or_insert_with(|| from_index.push(placeholder))
+        let (index, _) = self.indices.insert_full(placeholder);
+        index.into()
     }
 
     crate fn lookup_index(&self, placeholder: ty::PlaceholderRegion) -> PlaceholderIndex {
-        self.to_index[&placeholder]
+        self.indices.get_index_of(&placeholder).unwrap().into()
     }
 
     crate fn lookup_placeholder(&self, placeholder: PlaceholderIndex) -> ty::PlaceholderRegion {
-        self.from_index[placeholder]
+        self.indices[placeholder.index()]
     }
 
     crate fn len(&self) -> usize {
-        self.from_index.len()
+        self.indices.len()
     }
 }


@@ -2469,11 +2469,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         // example).
         if let Some(all_facts) = all_facts {
             let _prof_timer = self.infcx.tcx.prof.generic_activity("polonius_fact_generation");
-            if let Some(borrow_index) = borrow_set.location_map.get(&location) {
+            if let Some(borrow_index) = borrow_set.get_index_of(&location) {
                 let region_vid = borrow_region.to_region_vid();
                 all_facts.borrow_region.push((
                     region_vid,
-                    *borrow_index,
+                    borrow_index,
                     location_table.mid_index(location),
                 ));
             }


@@ -136,9 +136,9 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
         borrow_set: &Rc<BorrowSet<'tcx>>,
     ) -> Self {
         let mut borrows_out_of_scope_at_location = FxHashMap::default();
-        for (borrow_index, borrow_data) in borrow_set.borrows.iter_enumerated() {
+        for (borrow_index, borrow_data) in borrow_set.iter_enumerated() {
             let borrow_region = borrow_data.region.to_region_vid();
-            let location = borrow_set.borrows[borrow_index].reserve_location;
+            let location = borrow_data.reserve_location;
 
             precompute_borrows_out_of_scope(
                 body,
@@ -160,7 +160,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
     }
 
     pub fn location(&self, idx: BorrowIndex) -> &Location {
-        &self.borrow_set.borrows[idx].reserve_location
+        &self.borrow_set[idx].reserve_location
     }
 
     /// Add all borrows to the kill set, if those borrows are out of scope at `location`.
@@ -216,7 +216,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
             places_conflict(
                 self.tcx,
                 self.body,
-                self.borrow_set.borrows[i].borrowed_place,
+                self.borrow_set[i].borrowed_place,
                 place,
                 PlaceConflictBias::NoOverlap,
             )
@@ -232,7 +232,7 @@ impl<'tcx> dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
     const NAME: &'static str = "borrows";
 
     fn bits_per_block(&self, _: &mir::Body<'tcx>) -> usize {
-        self.borrow_set.borrows.len() * 2
+        self.borrow_set.len() * 2
     }
 
     fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut BitSet<Self::Idx>) {
@@ -271,11 +271,11 @@ impl<'tcx> dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
         ) {
             return;
         }
-        let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {
+        let index = self.borrow_set.get_index_of(&location).unwrap_or_else(|| {
             panic!("could not find BorrowIndex for location {:?}", location);
         });
 
-        trans.gen(*index);
+        trans.gen(index);
     }
 
     // Make sure there are no remaining borrows for variables


@@ -11,7 +11,7 @@ use crate::build::{BlockAnd, BlockAndExtension, Builder};
 use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode};
 use crate::thir::{self, *};
 use rustc_data_structures::{
-    fx::{FxHashMap, FxHashSet},
+    fx::{FxHashSet, FxIndexMap},
     stack::ensure_sufficient_stack,
 };
 use rustc_hir::HirId;
@@ -817,9 +817,7 @@ enum TestKind<'tcx> {
         ///
         /// For `bool` we always generate two edges, one for `true` and one for
         /// `false`.
-        options: Vec<u128>,
-
-        /// Reverse map used to ensure that the values in `options` are unique.
-        indices: FxHashMap<&'tcx ty::Const<'tcx>, usize>,
+        options: FxIndexMap<&'tcx ty::Const<'tcx>, u128>,
     },
 
     /// Test for equality with value, possibly after an unsizing coercion to
@@ -1396,14 +1394,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         // may want to add cases based on the candidates that are
         // available
         match test.kind {
-            TestKind::SwitchInt { switch_ty, ref mut options, ref mut indices } => {
+            TestKind::SwitchInt { switch_ty, ref mut options } => {
                 for candidate in candidates.iter() {
                     if !self.add_cases_to_switch(
                         &match_place,
                        candidate,
                        switch_ty,
                        options,
-                        indices,
                    ) {
                        break;
                    }


@@ -9,7 +9,7 @@ use crate::build::matches::{Candidate, MatchPair, Test, TestKind};
 use crate::build::Builder;
 use crate::thir::pattern::compare_const_vals;
 use crate::thir::*;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_hir::RangeEnd;
 use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::*;
@@ -44,8 +44,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 // these maps are empty to start; cases are
                 // added below in add_cases_to_switch
-                options: vec![],
-                indices: Default::default(),
+                options: Default::default(),
             },
         }
     }
@@ -83,8 +82,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         test_place: &Place<'tcx>,
         candidate: &Candidate<'pat, 'tcx>,
         switch_ty: Ty<'tcx>,
-        options: &mut Vec<u128>,
-        indices: &mut FxHashMap<&'tcx ty::Const<'tcx>, usize>,
+        options: &mut FxIndexMap<&'tcx ty::Const<'tcx>, u128>,
     ) -> bool {
         let match_pair = match candidate.match_pairs.iter().find(|mp| mp.place == *test_place) {
             Some(match_pair) => match_pair,
@@ -95,9 +93,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         match *match_pair.pattern.kind {
             PatKind::Constant { value } => {
-                indices.entry(value).or_insert_with(|| {
-                    options.push(value.eval_bits(self.hir.tcx(), self.hir.param_env, switch_ty));
-                    options.len() - 1
+                options.entry(value).or_insert_with(|| {
+                    value.eval_bits(self.hir.tcx(), self.hir.param_env, switch_ty)
                 });
                 true
             }
@@ -106,7 +103,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             }
             PatKind::Range(range) => {
                 // Check that none of the switch values are in the range.
-                self.values_not_contained_in_range(range, indices).unwrap_or(false)
+                self.values_not_contained_in_range(range, options).unwrap_or(false)
             }
             PatKind::Slice { .. }
             | PatKind::Array { .. }
@@ -216,7 +213,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 );
             }
 
-            TestKind::SwitchInt { switch_ty, ref options, indices: _ } => {
+            TestKind::SwitchInt { switch_ty, ref options } => {
                 let target_blocks = make_target_blocks(self);
                 let terminator = if switch_ty.kind == ty::Bool {
                     assert!(!options.is_empty() && options.len() <= 2);
@@ -236,7 +233,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     TerminatorKind::SwitchInt {
                         discr: Operand::Copy(place),
                         switch_ty,
-                        values: options.clone().into(),
+                        values: options.values().copied().collect(),
                         targets: target_blocks,
                     }
                 };
@@ -532,20 +529,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             // FIXME(#29623) we could use PatKind::Range to rule
             // things out here, in some cases.
             (
-                &TestKind::SwitchInt { switch_ty: _, options: _, ref indices },
+                &TestKind::SwitchInt { switch_ty: _, ref options },
                 &PatKind::Constant { ref value },
             ) if is_switch_ty(match_pair.pattern.ty) => {
-                let index = indices[value];
+                let index = options.get_index_of(value).unwrap();
                 self.candidate_without_match_pair(match_pair_index, candidate);
                 Some(index)
             }
 
             (
-                &TestKind::SwitchInt { switch_ty: _, ref options, ref indices },
+                &TestKind::SwitchInt { switch_ty: _, ref options },
                 &PatKind::Range(range),
             ) => {
                 let not_contained =
-                    self.values_not_contained_in_range(range, indices).unwrap_or(false);
+                    self.values_not_contained_in_range(range, options).unwrap_or(false);
 
                 if not_contained {
                     // No switch values are contained in the pattern range,
@@ -777,9 +774,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     fn values_not_contained_in_range(
         &self,
         range: PatRange<'tcx>,
-        indices: &FxHashMap<&'tcx ty::Const<'tcx>, usize>,
+        options: &FxIndexMap<&'tcx ty::Const<'tcx>, u128>,
     ) -> Option<bool> {
-        for &val in indices.keys() {
+        for &val in options.keys() {
            if self.const_range_contains(range, val)? {
                return Some(false);
            }


@@ -8,7 +8,7 @@ use crate::hygiene::SyntaxContext;
 use crate::SESSION_GLOBALS;
 use crate::{BytePos, SpanData};
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexSet;
 
 /// A compressed span.
 ///
@@ -111,25 +111,18 @@ impl Span {
 #[derive(Default)]
 pub struct SpanInterner {
-    spans: FxHashMap<SpanData, u32>,
-    span_data: Vec<SpanData>,
+    spans: FxIndexSet<SpanData>,
 }
 
 impl SpanInterner {
     fn intern(&mut self, span_data: &SpanData) -> u32 {
-        if let Some(index) = self.spans.get(span_data) {
-            return *index;
-        }
-        let index = self.spans.len() as u32;
-        self.span_data.push(*span_data);
-        self.spans.insert(*span_data, index);
-        index
+        let (index, _) = self.spans.insert_full(*span_data);
+        index as u32
     }
 
     #[inline]
     fn get(&self, index: u32) -> &SpanData {
-        &self.span_data[index as usize]
+        &self.spans[index as usize]
     }
 }


@@ -1481,6 +1481,10 @@ impl<CTX> ToStableHashKey<CTX> for Symbol {
     }
 }
 
 // The `&'static str`s in this type actually point into the arena.
+//
+// The `FxHashMap`+`Vec` pair could be replaced by `FxIndexSet`, but #75278
+// found that to regress performance up to 2% in some cases. This might be
+// revisited after further improvements to `indexmap`.
 #[derive(Default)]
 pub struct Interner {
     arena: DroplessArena,


@@ -4,7 +4,7 @@
 //! types computed here.
 
 use super::FnCtxt;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, DefKind, Res};
 use rustc_hir::def_id::DefId;
@@ -16,7 +16,7 @@ use rustc_span::Span;
 struct InteriorVisitor<'a, 'tcx> {
     fcx: &'a FnCtxt<'a, 'tcx>,
-    types: FxHashMap<ty::GeneratorInteriorTypeCause<'tcx>, usize>,
+    types: FxIndexSet<ty::GeneratorInteriorTypeCause<'tcx>>,
     region_scope_tree: &'tcx region::ScopeTree,
     expr_count: usize,
     kind: hir::GeneratorKind,
@@ -88,18 +88,15 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
                     .span_note(yield_data.span, &*note)
                     .emit();
             } else {
-                // Map the type to the number of types added before it
-                let entries = self.types.len();
+                // Insert the type into the ordered set.
                 let scope_span = scope.map(|s| s.span(self.fcx.tcx, self.region_scope_tree));
-                self.types
-                    .entry(ty::GeneratorInteriorTypeCause {
-                        span: source_span,
-                        ty: &ty,
-                        scope_span,
-                        yield_span: yield_data.span,
-                        expr: expr.map(|e| e.hir_id),
-                    })
-                    .or_insert(entries);
+                self.types.insert(ty::GeneratorInteriorTypeCause {
+                    span: source_span,
+                    ty: &ty,
+                    scope_span,
+                    yield_span: yield_data.span,
+                    expr: expr.map(|e| e.hir_id),
+                });
             }
         } else {
             debug!(
@@ -132,7 +129,7 @@ pub fn resolve_interior<'a, 'tcx>(
     let body = fcx.tcx.hir().body(body_id);
     let mut visitor = InteriorVisitor {
         fcx,
-        types: FxHashMap::default(),
+        types: FxIndexSet::default(),
         region_scope_tree: fcx.tcx.region_scope_tree(def_id),
         expr_count: 0,
         kind,
@@ -144,10 +141,8 @@ pub fn resolve_interior<'a, 'tcx>(
     let region_expr_count = visitor.region_scope_tree.body_expr_count(body_id).unwrap();
     assert_eq!(region_expr_count, visitor.expr_count);
 
-    let mut types: Vec<_> = visitor.types.drain().collect();
-
-    // Sort types by insertion order
-    types.sort_by_key(|t| t.1);
+    // The types are already kept in insertion order.
+    let types = visitor.types;
 
     // The types in the generator interior contain lifetimes local to the generator itself,
     // which should not be exposed outside of the generator. Therefore, we replace these
@@ -164,7 +159,7 @@ pub fn resolve_interior<'a, 'tcx>(
     let mut captured_tys = FxHashSet::default();
     let type_causes: Vec<_> = types
         .into_iter()
-        .filter_map(|(mut cause, _)| {
+        .filter_map(|mut cause| {
             // Erase regions and canonicalize late-bound regions to deduplicate as many types as we
             // can.
             let erased = fcx.tcx.erase_regions(&cause.ty);


@@ -22,7 +22,7 @@ use rustc_ast::ast;
 use rustc_ast::ast::MetaItemKind;
 use rustc_attr::{list_contains_name, InlineAttr, OptimizeAttr};
 use rustc_data_structures::captures::Captures;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
 use rustc_errors::{struct_span_err, Applicability};
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, DefKind, Res};
@@ -1718,21 +1718,17 @@ fn explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicat
 /// A data structure with unique elements, which preserves order of insertion.
 /// Preserving the order of insertion is important here so as not to break
 /// compile-fail UI tests.
-// FIXME(eddyb) just use `IndexSet` from `indexmap`.
 struct UniquePredicates<'tcx> {
-    predicates: Vec<(ty::Predicate<'tcx>, Span)>,
-    uniques: FxHashSet<(ty::Predicate<'tcx>, Span)>,
+    predicates: FxIndexSet<(ty::Predicate<'tcx>, Span)>,
 }
 
 impl<'tcx> UniquePredicates<'tcx> {
     fn new() -> Self {
-        UniquePredicates { predicates: vec![], uniques: FxHashSet::default() }
+        UniquePredicates { predicates: FxIndexSet::default() }
     }
 
     fn push(&mut self, value: (ty::Predicate<'tcx>, Span)) {
-        if self.uniques.insert(value) {
-            self.predicates.push(value);
-        }
+        self.predicates.insert(value);
     }
 
     fn extend<I: IntoIterator<Item = (ty::Predicate<'tcx>, Span)>>(&mut self, iter: I) {
@@ -2014,7 +2010,7 @@ fn explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicat
         }))
     }
 
-    let mut predicates = predicates.predicates;
+    let mut predicates: Vec<_> = predicates.predicates.into_iter().collect();
 
     // Subtle: before we store the predicates into the tcx, we
     // sort them so that predicates like `T: Foo<Item=U>` come