Auto merge of #75278 - cuviper:indexmap, r=Mark-Simulacrum
Upgrade indexmap and use it more

First this upgrades `indexmap` to 1.5.1, which is now based on `hashbrown::raw::RawTable`. This means it shares a lot of the same performance characteristics for insert, lookup, etc., while keeping items in insertion order. Then, across various rustc crates, this replaces a lot of `Vec`+`HashMap` pairs with a single `IndexMap` or `IndexSet`.

Closes #60608.

r? @eddyb
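As a quick illustration of the pattern this PR applies (a sketch, not code taken from the diff): the `FxIndexSet`/`FxIndexMap` names in the changes below are rustc's aliases for `indexmap`'s `IndexSet`/`IndexMap` with the `FxHasher`, and the `insert_full`/`get_index_of`/`get_index` calls are the public `indexmap` API that replaces the old `Vec` + `HashMap` bookkeeping. The file names and values here are made up.

```rust
// Minimal sketch: a Vec<T> (index -> value) paired with a HashMap<T, usize>
// (value -> index) collapses into a single insertion-ordered set.
use indexmap::IndexSet;

fn main() {
    let mut files: IndexSet<String> = IndexSet::new();

    // `insert_full` returns (index, newly_inserted), replacing the old
    // "check the map, push onto the vec, record the index" dance.
    let (idx, added) = files.insert_full("a.rs".to_string());
    assert_eq!((idx, added), (0, true));
    let (idx, added) = files.insert_full("a.rs".to_string());
    assert_eq!((idx, added), (0, false));
    files.insert_full("b.rs".to_string());

    // `get_index_of` replaces the HashMap<T, usize> lookup;
    // `get_index` replaces indexing into the Vec<T>.
    assert_eq!(files.get_index_of("b.rs"), Some(1));
    assert_eq!(files.get_index(1).map(String::as_str), Some("b.rs"));

    // Iteration still yields items in insertion order, like the old Vec.
    let in_order: Vec<&str> = files.iter().map(String::as_str).collect();
    assert_eq!(in_order, ["a.rs", "b.rs"]);
}
```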
commit 18f3be7704
@@ -1382,9 +1382,13 @@ dependencies = [
 
 [[package]]
 name = "indexmap"
-version = "1.0.2"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e81a7c05f79578dbc15793d8b619db9ba32b4577003ef3af1a91c416798c58d"
+checksum = "86b45e59b16c76b11bf9738fd5d38879d3bd28ad292d7b313608becb17ae2df9"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
 
 [[package]]
 name = "installer"
@@ -6,7 +6,7 @@ use llvm::coverageinfo::CounterMappingRegion;
 use log::debug;
 use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression, Region};
 use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods};
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexSet;
 use rustc_llvm::RustString;
 
 use std::ffi::CString;
@@ -76,13 +76,12 @@ pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
 }
 
 struct CoverageMapGenerator {
-    filenames: Vec<CString>,
-    filename_to_index: FxHashMap<CString, u32>,
+    filenames: FxIndexSet<CString>,
 }
 
 impl CoverageMapGenerator {
     fn new() -> Self {
-        Self { filenames: Vec::new(), filename_to_index: FxHashMap::default() }
+        Self { filenames: FxIndexSet::default() }
     }
 
     /// Using the `expressions` and `counter_regions` collected for the current function, generate
@@ -122,16 +121,8 @@ impl CoverageMapGenerator {
             let c_filename =
                 CString::new(file_name).expect("null error converting filename to C string");
             debug!(" file_id: {} = '{:?}'", current_file_id, c_filename);
-            let filenames_index = match self.filename_to_index.get(&c_filename) {
-                Some(index) => *index,
-                None => {
-                    let index = self.filenames.len() as u32;
-                    self.filenames.push(c_filename.clone());
-                    self.filename_to_index.insert(c_filename.clone(), index);
-                    index
-                }
-            };
-            virtual_file_mapping.push(filenames_index);
+            let (filenames_index, _) = self.filenames.insert_full(c_filename);
+            virtual_file_mapping.push(filenames_index as u32);
         }
         mapping_regions.push(CounterMappingRegion::code_region(
             counter,
@@ -97,8 +97,11 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
     }
 }
 
-pub(crate) fn write_filenames_section_to_buffer(filenames: &Vec<CString>, buffer: &RustString) {
-    let c_str_vec = filenames.iter().map(|cstring| cstring.as_ptr()).collect::<Vec<_>>();
+pub(crate) fn write_filenames_section_to_buffer<'a>(
+    filenames: impl IntoIterator<Item = &'a CString>,
+    buffer: &RustString,
+) {
+    let c_str_vec = filenames.into_iter().map(|cstring| cstring.as_ptr()).collect::<Vec<_>>();
     unsafe {
         llvm::LLVMRustCoverageWriteFilenamesSectionToBuffer(
             c_str_vec.as_ptr(),
@@ -11,7 +11,7 @@ doctest = false
 
 [dependencies]
 ena = "0.14"
-indexmap = "1"
+indexmap = "1.5.1"
 log = { package = "tracing", version = "0.1" }
 jobserver_crate = { version = "0.1.13", package = "jobserver" }
 lazy_static = "1"
@@ -1,4 +1,4 @@
-use crate::fx::FxHashMap;
+use crate::fx::FxIndexSet;
 use crate::stable_hasher::{HashStable, StableHasher};
 use crate::sync::Lock;
 use rustc_index::bit_set::BitMatrix;
@@ -13,10 +13,7 @@ mod tests;
 #[derive(Clone, Debug)]
 pub struct TransitiveRelation<T: Eq + Hash> {
     // List of elements. This is used to map from a T to a usize.
-    elements: Vec<T>,
-
-    // Maps each element to an index.
-    map: FxHashMap<T, Index>,
+    elements: FxIndexSet<T>,
 
     // List of base edges in the graph. Require to compute transitive
     // closure.
@@ -39,7 +36,6 @@ impl<T: Eq + Hash> Default for TransitiveRelation<T> {
     fn default() -> Self {
         TransitiveRelation {
             elements: Default::default(),
-            map: Default::default(),
             edges: Default::default(),
             closure: Default::default(),
         }
@@ -65,20 +61,16 @@ impl<T: Clone + Debug + Eq + Hash> TransitiveRelation<T> {
     }
 
     fn index(&self, a: &T) -> Option<Index> {
-        self.map.get(a).cloned()
+        self.elements.get_index_of(a).map(Index)
     }
 
     fn add_index(&mut self, a: T) -> Index {
-        let &mut TransitiveRelation { ref mut elements, ref mut closure, ref mut map, .. } = self;
-
-        *map.entry(a.clone()).or_insert_with(|| {
-            elements.push(a);
-
+        let (index, added) = self.elements.insert_full(a);
+        if added {
             // if we changed the dimensions, clear the cache
-            *closure.get_mut() = None;
-
-            Index(elements.len() - 1)
-        })
+            *self.closure.get_mut() = None;
+        }
+        Index(index)
     }
 
     /// Applies the (partial) function to each edge and returns a new
@@ -430,14 +422,11 @@ where
 {
     fn decode<D: Decoder>(d: &mut D) -> Result<Self, D::Error> {
         d.read_struct("TransitiveRelation", 2, |d| {
-            let elements: Vec<T> = d.read_struct_field("elements", 0, |d| Decodable::decode(d))?;
-            let edges = d.read_struct_field("edges", 1, |d| Decodable::decode(d))?;
-            let map = elements
-                .iter()
-                .enumerate()
-                .map(|(index, elem)| (elem.clone(), Index(index)))
-                .collect();
-            Ok(TransitiveRelation { elements, edges, map, closure: Lock::new(None) })
+            Ok(TransitiveRelation {
+                elements: d.read_struct_field("elements", 0, |d| Decodable::decode(d))?,
+                edges: d.read_struct_field("edges", 1, |d| Decodable::decode(d))?,
+                closure: Lock::new(None),
+            })
         })
     }
 }
@@ -452,8 +441,6 @@ where
         let TransitiveRelation {
             ref elements,
            ref edges,
-            // "map" is just a copy of elements vec
-            map: _,
            // "closure" is just a copy of the data above
            closure: _,
        } = *self;
@@ -4,7 +4,7 @@ use crate::rmeta::*;
 use log::{debug, trace};
 use rustc_ast::ast;
 use rustc_data_structures::fingerprint::Fingerprint;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
 use rustc_data_structures::stable_hasher::StableHasher;
 use rustc_data_structures::sync::{join, Lrc};
 use rustc_hir as hir;
@@ -48,8 +48,7 @@ struct EncodeContext<'a, 'tcx> {
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
 
-    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
-    interpret_allocs_inverse: Vec<interpret::AllocId>,
+    interpret_allocs: FxIndexSet<interpret::AllocId>,
 
     // This is used to speed up Span encoding.
     // The `usize` is an index into the `MonotonicVec`
@@ -331,17 +330,7 @@ impl<'a, 'b, 'tcx> SpecializedEncoder<ty::Predicate<'b>> for EncodeContext<'a, '
 
 impl<'a, 'tcx> SpecializedEncoder<interpret::AllocId> for EncodeContext<'a, 'tcx> {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
-        use std::collections::hash_map::Entry;
-        let index = match self.interpret_allocs.entry(*alloc_id) {
-            Entry::Occupied(e) => *e.get(),
-            Entry::Vacant(e) => {
-                let idx = self.interpret_allocs_inverse.len();
-                self.interpret_allocs_inverse.push(*alloc_id);
-                e.insert(idx);
-                idx
-            }
-        };
-
+        let (index, _) = self.interpret_allocs.insert_full(*alloc_id);
         index.encode(self)
     }
 }
@@ -583,7 +572,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         let mut n = 0;
         trace!("beginning to encode alloc ids");
         loop {
-            let new_n = self.interpret_allocs_inverse.len();
+            let new_n = self.interpret_allocs.len();
             // if we have found new ids, serialize those, too
             if n == new_n {
                 // otherwise, abort
@@ -591,7 +580,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
             }
             trace!("encoding {} further alloc ids", new_n - n);
             for idx in n..new_n {
-                let id = self.interpret_allocs_inverse[idx];
+                let id = self.interpret_allocs[idx];
                 let pos = self.position() as u32;
                 interpret_alloc_index.push(pos);
                 interpret::specialized_encode_alloc_id(self, tcx, id).unwrap();
@@ -2019,7 +2008,6 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata {
         predicate_shorthands: Default::default(),
         source_file_cache: (source_map_files[0].clone(), 0),
         interpret_allocs: Default::default(),
-        interpret_allocs_inverse: Default::default(),
         required_source_files: Some(GrowableBitSet::with_capacity(source_map_files.len())),
         is_proc_macro: tcx.sess.crate_types().contains(&CrateType::ProcMacro),
         hygiene_ctxt: &hygiene_ctxt,
@@ -5,7 +5,7 @@ use crate::ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
 use crate::ty::context::TyCtxt;
 use crate::ty::{self, Ty};
 use rustc_data_structures::fingerprint::Fingerprint;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_data_structures::sync::{HashMapExt, Lock, Lrc, OnceCell};
 use rustc_data_structures::thin_vec::ThinVec;
 use rustc_errors::Diagnostic;
@@ -212,7 +212,6 @@ impl<'sess> OnDiskCache<'sess> {
             type_shorthands: Default::default(),
             predicate_shorthands: Default::default(),
             interpret_allocs: Default::default(),
-            interpret_allocs_inverse: Vec::new(),
             source_map: CachingSourceMapView::new(tcx.sess.source_map()),
             file_to_file_index,
             hygiene_context: &hygiene_encode_context,
@@ -267,7 +266,7 @@ impl<'sess> OnDiskCache<'sess> {
         let mut interpret_alloc_index = Vec::new();
         let mut n = 0;
         loop {
-            let new_n = encoder.interpret_allocs_inverse.len();
+            let new_n = encoder.interpret_allocs.len();
             // If we have found new IDs, serialize those too.
             if n == new_n {
                 // Otherwise, abort.
@@ -275,7 +274,7 @@ impl<'sess> OnDiskCache<'sess> {
             }
             interpret_alloc_index.reserve(new_n - n);
             for idx in n..new_n {
-                let id = encoder.interpret_allocs_inverse[idx];
+                let id = encoder.interpret_allocs[idx];
                 let pos = encoder.position() as u32;
                 interpret_alloc_index.push(pos);
                 interpret::specialized_encode_alloc_id(&mut encoder, tcx, id)?;
@@ -767,8 +766,7 @@ struct CacheEncoder<'a, 'tcx, E: ty_codec::TyEncoder> {
     encoder: &'a mut E,
     type_shorthands: FxHashMap<Ty<'tcx>, usize>,
     predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
-    interpret_allocs: FxHashMap<interpret::AllocId, usize>,
-    interpret_allocs_inverse: Vec<interpret::AllocId>,
+    interpret_allocs: FxIndexSet<interpret::AllocId>,
     source_map: CachingSourceMapView<'tcx>,
     file_to_file_index: FxHashMap<*const SourceFile, SourceFileIndex>,
     hygiene_context: &'a HygieneEncodeContext,
@@ -807,17 +805,7 @@ where
     E: 'a + TyEncoder,
 {
     fn specialized_encode(&mut self, alloc_id: &interpret::AllocId) -> Result<(), Self::Error> {
-        use std::collections::hash_map::Entry;
-        let index = match self.interpret_allocs.entry(*alloc_id) {
-            Entry::Occupied(e) => *e.get(),
-            Entry::Vacant(e) => {
-                let idx = self.interpret_allocs_inverse.len();
-                self.interpret_allocs_inverse.push(*alloc_id);
-                e.insert(idx);
-                idx
-            }
-        };
-
+        let (index, _) = self.interpret_allocs.insert_full(*alloc_id);
         index.encode(self)
     }
 }
@@ -3,9 +3,8 @@ use crate::borrow_check::path_utils::allow_two_phase_borrow;
 use crate::borrow_check::place_ext::PlaceExt;
 use crate::dataflow::indexes::BorrowIndex;
 use crate::dataflow::move_paths::MoveData;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
 use rustc_index::bit_set::BitSet;
-use rustc_index::vec::IndexVec;
 use rustc_middle::mir::traversal;
 use rustc_middle::mir::visit::{MutatingUseContext, NonUseContext, PlaceContext, Visitor};
 use rustc_middle::mir::{self, Body, Local, Location};
@@ -15,14 +14,11 @@ use std::ops::Index;
 
 crate struct BorrowSet<'tcx> {
     /// The fundamental map relating bitvector indexes to the borrows
-    /// in the MIR.
-    crate borrows: IndexVec<BorrowIndex, BorrowData<'tcx>>,
-
-    /// Each borrow is also uniquely identified in the MIR by the
-    /// `Location` of the assignment statement in which it appears on
-    /// the right hand side; we map each such location to the
-    /// corresponding `BorrowIndex`.
-    crate location_map: FxHashMap<Location, BorrowIndex>,
+    /// in the MIR. Each borrow is also uniquely identified in the MIR
+    /// by the `Location` of the assignment statement in which it
+    /// appears on the right hand side. Thus the location is the map
+    /// key, and its position in the map corresponds to `BorrowIndex`.
+    crate location_map: FxIndexMap<Location, BorrowData<'tcx>>,
 
     /// Locations which activate borrows.
     /// NOTE: a given location may activate more than one borrow in the future
@@ -40,7 +36,7 @@ impl<'tcx> Index<BorrowIndex> for BorrowSet<'tcx> {
     type Output = BorrowData<'tcx>;
 
     fn index(&self, index: BorrowIndex) -> &BorrowData<'tcx> {
-        &self.borrows[index]
+        &self.location_map[index.as_usize()]
     }
 }
 
@@ -129,7 +125,6 @@ impl<'tcx> BorrowSet<'tcx> {
         let mut visitor = GatherBorrows {
             tcx,
             body: &body,
-            idx_vec: IndexVec::new(),
             location_map: Default::default(),
             activation_map: Default::default(),
             local_map: Default::default(),
@@ -146,7 +141,6 @@ impl<'tcx> BorrowSet<'tcx> {
         }
 
         BorrowSet {
-            borrows: visitor.idx_vec,
             location_map: visitor.location_map,
             activation_map: visitor.activation_map,
             local_map: visitor.local_map,
@@ -157,13 +151,32 @@ impl<'tcx> BorrowSet<'tcx> {
     crate fn activations_at_location(&self, location: Location) -> &[BorrowIndex] {
         self.activation_map.get(&location).map(|activations| &activations[..]).unwrap_or(&[])
     }
+
+    crate fn len(&self) -> usize {
+        self.location_map.len()
+    }
+
+    crate fn indices(&self) -> impl Iterator<Item = BorrowIndex> {
+        BorrowIndex::from_usize(0)..BorrowIndex::from_usize(self.len())
+    }
+
+    crate fn iter_enumerated(&self) -> impl Iterator<Item = (BorrowIndex, &BorrowData<'tcx>)> {
+        self.indices().zip(self.location_map.values())
+    }
+
+    crate fn get_index_of(&self, location: &Location) -> Option<BorrowIndex> {
+        self.location_map.get_index_of(location).map(BorrowIndex::from)
+    }
+
+    crate fn contains(&self, location: &Location) -> bool {
+        self.location_map.contains_key(location)
+    }
 }
 
 struct GatherBorrows<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
     body: &'a Body<'tcx>,
-    idx_vec: IndexVec<BorrowIndex, BorrowData<'tcx>>,
-    location_map: FxHashMap<Location, BorrowIndex>,
+    location_map: FxIndexMap<Location, BorrowData<'tcx>>,
     activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
     local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
 
@@ -203,8 +216,8 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
                 borrowed_place: *borrowed_place,
                 assigned_place: *assigned_place,
             };
-            let idx = self.idx_vec.push(borrow);
-            self.location_map.insert(location, idx);
+            let (idx, _) = self.location_map.insert_full(location, borrow);
+            let idx = BorrowIndex::from(idx);
 
             self.insert_as_pending_if_two_phase(location, assigned_place, kind, idx);
 
@@ -224,7 +237,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
             //
             //     TMP = &mut place
             if let Some(&borrow_index) = self.pending_activations.get(temp) {
-                let borrow_data = &mut self.idx_vec[borrow_index];
+                let borrow_data = &mut self.location_map[borrow_index.as_usize()];
 
                 // Watch out: the use of TMP in the borrow itself
                 // doesn't count as an activation. =)
@@ -265,8 +278,7 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherBorrows<'a, 'tcx> {
         if let mir::Rvalue::Ref(region, kind, ref place) = *rvalue {
             // double-check that we already registered a BorrowData for this
            // borrow
-            let borrow_index = self.location_map[&location];
-            let borrow_data = &self.idx_vec[borrow_index];
+            let borrow_data = &self.location_map[&location];
             assert_eq!(borrow_data.reserve_location, location);
             assert_eq!(borrow_data.kind, kind);
             assert_eq!(borrow_data.region, region.to_region_vid());
@@ -316,7 +328,7 @@ impl<'a, 'tcx> GatherBorrows<'a, 'tcx> {
         // Consider the borrow not activated to start. When we find an activation, we'll update
         // this field.
         {
-            let borrow_data = &mut self.idx_vec[borrow_index];
+            let borrow_data = &mut self.location_map[borrow_index.as_usize()];
             borrow_data.activation_location = TwoPhaseActivation::NotActivated;
         }
 
@@ -332,7 +344,7 @@ impl<'a, 'tcx> GatherBorrows<'a, 'tcx> {
                 at borrow_index: {:?} with associated data {:?}",
                 temp,
                 old_index,
-                self.idx_vec[old_index]
+                self.location_map[old_index.as_usize()]
             );
         }
     }
@@ -217,7 +217,7 @@ impl<'cx, 'cg, 'tcx> ConstraintGeneration<'cx, 'cg, 'tcx> {
             let places_conflict = places_conflict::places_conflict(
                 self.infcx.tcx,
                 self.body,
-                self.borrow_set.borrows[borrow_index].borrowed_place,
+                self.borrow_set[borrow_index].borrowed_place,
                 place,
                 places_conflict::PlaceConflictBias::NoOverlap,
             );
@@ -166,8 +166,8 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
                 // Invalidate all borrows of local places
                 let borrow_set = self.borrow_set.clone();
                 let resume = self.location_table.start_index(resume.start_location());
-                for i in borrow_set.borrows.indices() {
-                    if borrow_of_local_data(borrow_set.borrows[i].borrowed_place) {
+                for (i, data) in borrow_set.iter_enumerated() {
+                    if borrow_of_local_data(data.borrowed_place) {
                         self.all_facts.invalidates.push((resume, i));
                     }
                 }
@@ -178,8 +178,8 @@ impl<'cx, 'tcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx> {
                 // Invalidate all borrows of local places
                 let borrow_set = self.borrow_set.clone();
                 let start = self.location_table.start_index(location);
-                for i in borrow_set.borrows.indices() {
-                    if borrow_of_local_data(borrow_set.borrows[i].borrowed_place) {
+                for (i, data) in borrow_set.iter_enumerated() {
+                    if borrow_of_local_data(data.borrowed_place) {
                         self.all_facts.invalidates.push((start, i));
                     }
                 }
@@ -369,7 +369,7 @@ impl<'cx, 'tcx> InvalidationGenerator<'cx, 'tcx> {
         let tcx = self.tcx;
         let body = self.body;
         let borrow_set = self.borrow_set.clone();
-        let indices = self.borrow_set.borrows.indices();
+        let indices = self.borrow_set.indices();
         each_borrow_involving_path(
             self,
             tcx,
@@ -1131,11 +1131,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             (
                 Reservation(WriteKind::MutableBorrow(bk)),
                 BorrowKind::Shallow | BorrowKind::Shared,
-            ) if {
-                tcx.migrate_borrowck() && this.borrow_set.location_map.contains_key(&location)
-            } =>
-            {
-                let bi = this.borrow_set.location_map[&location];
+            ) if { tcx.migrate_borrowck() && this.borrow_set.contains(&location) } => {
+                let bi = this.borrow_set.get_index_of(&location).unwrap();
                 debug!(
                     "recording invalid reservation of place: {:?} with \
                     borrow index {:?} as warning",
@@ -206,7 +206,7 @@ pub(in crate::borrow_check) fn compute_regions<'cx, 'tcx>(
         // the `borrow_set`, their `BorrowIndex` are synthesized as the universal region index
        // added to the existing number of loans, as if they succeeded them in the set.
        //
-        let borrow_count = borrow_set.borrows.len();
+        let borrow_count = borrow_set.len();
        debug!(
            "compute_regions: polonius placeholders, num_universals={}, borrow_count={}",
            universal_regions.len(),
@@ -1,4 +1,4 @@
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexSet;
 use rustc_index::bit_set::{HybridBitSet, SparseBitMatrix};
 use rustc_index::vec::Idx;
 use rustc_index::vec::IndexVec;
@@ -193,26 +193,25 @@ impl<N: Idx> LivenessValues<N> {
 /// NLL.
 #[derive(Default)]
 crate struct PlaceholderIndices {
-    to_index: FxHashMap<ty::PlaceholderRegion, PlaceholderIndex>,
-    from_index: IndexVec<PlaceholderIndex, ty::PlaceholderRegion>,
+    indices: FxIndexSet<ty::PlaceholderRegion>,
 }
 
 impl PlaceholderIndices {
     crate fn insert(&mut self, placeholder: ty::PlaceholderRegion) -> PlaceholderIndex {
-        let PlaceholderIndices { to_index, from_index } = self;
-        *to_index.entry(placeholder).or_insert_with(|| from_index.push(placeholder))
+        let (index, _) = self.indices.insert_full(placeholder);
+        index.into()
    }
 
     crate fn lookup_index(&self, placeholder: ty::PlaceholderRegion) -> PlaceholderIndex {
-        self.to_index[&placeholder]
+        self.indices.get_index_of(&placeholder).unwrap().into()
    }
 
     crate fn lookup_placeholder(&self, placeholder: PlaceholderIndex) -> ty::PlaceholderRegion {
-        self.from_index[placeholder]
+        self.indices[placeholder.index()]
    }
 
     crate fn len(&self) -> usize {
-        self.from_index.len()
+        self.indices.len()
    }
 }
 
@@ -2469,11 +2469,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
         // example).
         if let Some(all_facts) = all_facts {
             let _prof_timer = self.infcx.tcx.prof.generic_activity("polonius_fact_generation");
-            if let Some(borrow_index) = borrow_set.location_map.get(&location) {
+            if let Some(borrow_index) = borrow_set.get_index_of(&location) {
                 let region_vid = borrow_region.to_region_vid();
                 all_facts.borrow_region.push((
                     region_vid,
-                    *borrow_index,
+                    borrow_index,
                     location_table.mid_index(location),
                 ));
             }
@@ -136,9 +136,9 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
         borrow_set: &Rc<BorrowSet<'tcx>>,
     ) -> Self {
         let mut borrows_out_of_scope_at_location = FxHashMap::default();
-        for (borrow_index, borrow_data) in borrow_set.borrows.iter_enumerated() {
+        for (borrow_index, borrow_data) in borrow_set.iter_enumerated() {
             let borrow_region = borrow_data.region.to_region_vid();
-            let location = borrow_set.borrows[borrow_index].reserve_location;
+            let location = borrow_data.reserve_location;
 
             precompute_borrows_out_of_scope(
                 body,
@@ -160,7 +160,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
     }
 
     pub fn location(&self, idx: BorrowIndex) -> &Location {
-        &self.borrow_set.borrows[idx].reserve_location
+        &self.borrow_set[idx].reserve_location
     }
 
     /// Add all borrows to the kill set, if those borrows are out of scope at `location`.
@@ -216,7 +216,7 @@ impl<'a, 'tcx> Borrows<'a, 'tcx> {
             places_conflict(
                 self.tcx,
                 self.body,
-                self.borrow_set.borrows[i].borrowed_place,
+                self.borrow_set[i].borrowed_place,
                 place,
                 PlaceConflictBias::NoOverlap,
             )
@@ -232,7 +232,7 @@ impl<'tcx> dataflow::AnalysisDomain<'tcx> for Borrows<'_, 'tcx> {
     const NAME: &'static str = "borrows";
 
     fn bits_per_block(&self, _: &mir::Body<'tcx>) -> usize {
-        self.borrow_set.borrows.len() * 2
+        self.borrow_set.len() * 2
     }
 
     fn initialize_start_block(&self, _: &mir::Body<'tcx>, _: &mut BitSet<Self::Idx>) {
@@ -271,11 +271,11 @@ impl<'tcx> dataflow::GenKillAnalysis<'tcx> for Borrows<'_, 'tcx> {
         ) {
             return;
         }
-        let index = self.borrow_set.location_map.get(&location).unwrap_or_else(|| {
+        let index = self.borrow_set.get_index_of(&location).unwrap_or_else(|| {
             panic!("could not find BorrowIndex for location {:?}", location);
         });
 
-        trans.gen(*index);
+        trans.gen(index);
     }
 
     // Make sure there are no remaining borrows for variables
@@ -11,7 +11,7 @@ use crate::build::{BlockAnd, BlockAndExtension, Builder};
 use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode};
 use crate::thir::{self, *};
 use rustc_data_structures::{
-    fx::{FxHashMap, FxHashSet},
+    fx::{FxHashSet, FxIndexMap},
     stack::ensure_sufficient_stack,
 };
 use rustc_hir::HirId;
@@ -817,9 +817,7 @@ enum TestKind<'tcx> {
         ///
         /// For `bool` we always generate two edges, one for `true` and one for
         /// `false`.
-        options: Vec<u128>,
-        /// Reverse map used to ensure that the values in `options` are unique.
-        indices: FxHashMap<&'tcx ty::Const<'tcx>, usize>,
+        options: FxIndexMap<&'tcx ty::Const<'tcx>, u128>,
     },
 
     /// Test for equality with value, possibly after an unsizing coercion to
@@ -1396,14 +1394,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         // may want to add cases based on the candidates that are
        // available
        match test.kind {
-            TestKind::SwitchInt { switch_ty, ref mut options, ref mut indices } => {
+            TestKind::SwitchInt { switch_ty, ref mut options } => {
                for candidate in candidates.iter() {
                    if !self.add_cases_to_switch(
                        &match_place,
                        candidate,
                        switch_ty,
                        options,
-                        indices,
                    ) {
                        break;
                    }
@@ -9,7 +9,7 @@ use crate::build::matches::{Candidate, MatchPair, Test, TestKind};
 use crate::build::Builder;
 use crate::thir::pattern::compare_const_vals;
 use crate::thir::*;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_hir::RangeEnd;
 use rustc_index::bit_set::BitSet;
 use rustc_middle::mir::*;
@@ -44,8 +44,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 
                 // these maps are empty to start; cases are
                 // added below in add_cases_to_switch
-                options: vec![],
-                indices: Default::default(),
+                options: Default::default(),
             },
         }
     }
@@ -83,8 +82,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         test_place: &Place<'tcx>,
         candidate: &Candidate<'pat, 'tcx>,
         switch_ty: Ty<'tcx>,
-        options: &mut Vec<u128>,
-        indices: &mut FxHashMap<&'tcx ty::Const<'tcx>, usize>,
+        options: &mut FxIndexMap<&'tcx ty::Const<'tcx>, u128>,
     ) -> bool {
         let match_pair = match candidate.match_pairs.iter().find(|mp| mp.place == *test_place) {
             Some(match_pair) => match_pair,
@@ -95,9 +93,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
 
         match *match_pair.pattern.kind {
             PatKind::Constant { value } => {
-                indices.entry(value).or_insert_with(|| {
-                    options.push(value.eval_bits(self.hir.tcx(), self.hir.param_env, switch_ty));
-                    options.len() - 1
+                options.entry(value).or_insert_with(|| {
+                    value.eval_bits(self.hir.tcx(), self.hir.param_env, switch_ty)
                 });
                 true
             }
@@ -106,7 +103,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             }
             PatKind::Range(range) => {
                 // Check that none of the switch values are in the range.
-                self.values_not_contained_in_range(range, indices).unwrap_or(false)
+                self.values_not_contained_in_range(range, options).unwrap_or(false)
             }
             PatKind::Slice { .. }
             | PatKind::Array { .. }
@@ -216,7 +213,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 );
             }
 
-            TestKind::SwitchInt { switch_ty, ref options, indices: _ } => {
+            TestKind::SwitchInt { switch_ty, ref options } => {
                 let target_blocks = make_target_blocks(self);
                 let terminator = if switch_ty.kind == ty::Bool {
                     assert!(!options.is_empty() && options.len() <= 2);
@@ -236,7 +233,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     TerminatorKind::SwitchInt {
                         discr: Operand::Copy(place),
                         switch_ty,
-                        values: options.clone().into(),
+                        values: options.values().copied().collect(),
                         targets: target_blocks,
                     }
                 };
@@ -532,20 +529,20 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             // FIXME(#29623) we could use PatKind::Range to rule
             // things out here, in some cases.
             (
-                &TestKind::SwitchInt { switch_ty: _, options: _, ref indices },
+                &TestKind::SwitchInt { switch_ty: _, ref options },
                 &PatKind::Constant { ref value },
             ) if is_switch_ty(match_pair.pattern.ty) => {
-                let index = indices[value];
+                let index = options.get_index_of(value).unwrap();
                 self.candidate_without_match_pair(match_pair_index, candidate);
                 Some(index)
             }
 
             (
-                &TestKind::SwitchInt { switch_ty: _, ref options, ref indices },
+                &TestKind::SwitchInt { switch_ty: _, ref options },
                 &PatKind::Range(range),
             ) => {
                 let not_contained =
-                    self.values_not_contained_in_range(range, indices).unwrap_or(false);
+                    self.values_not_contained_in_range(range, options).unwrap_or(false);
 
                 if not_contained {
                     // No switch values are contained in the pattern range,
@@ -777,9 +774,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     fn values_not_contained_in_range(
         &self,
         range: PatRange<'tcx>,
-        indices: &FxHashMap<&'tcx ty::Const<'tcx>, usize>,
+        options: &FxIndexMap<&'tcx ty::Const<'tcx>, u128>,
     ) -> Option<bool> {
-        for &val in indices.keys() {
+        for &val in options.keys() {
             if self.const_range_contains(range, val)? {
                 return Some(false);
             }
@@ -8,7 +8,7 @@ use crate::hygiene::SyntaxContext;
 use crate::SESSION_GLOBALS;
 use crate::{BytePos, SpanData};
 
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexSet;
 
 /// A compressed span.
 ///
@@ -111,25 +111,18 @@ impl Span {
 
 #[derive(Default)]
 pub struct SpanInterner {
-    spans: FxHashMap<SpanData, u32>,
-    span_data: Vec<SpanData>,
+    spans: FxIndexSet<SpanData>,
 }
 
 impl SpanInterner {
     fn intern(&mut self, span_data: &SpanData) -> u32 {
-        if let Some(index) = self.spans.get(span_data) {
-            return *index;
-        }
-
-        let index = self.spans.len() as u32;
-        self.span_data.push(*span_data);
-        self.spans.insert(*span_data, index);
-        index
+        let (index, _) = self.spans.insert_full(*span_data);
+        index as u32
     }
 
     #[inline]
     fn get(&self, index: u32) -> &SpanData {
-        &self.span_data[index as usize]
+        &self.spans[index as usize]
     }
 }
 
@@ -1481,6 +1481,10 @@ impl<CTX> ToStableHashKey<CTX> for Symbol {
 }
 
 // The `&'static str`s in this type actually point into the arena.
+//
+// The `FxHashMap`+`Vec` pair could be replaced by `FxIndexSet`, but #75278
+// found that to regress performance up to 2% in some cases. This might be
+// revisited after further improvements to `indexmap`.
 #[derive(Default)]
 pub struct Interner {
     arena: DroplessArena,
@@ -4,7 +4,7 @@
 //! types computed here.
 
 use super::FnCtxt;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexSet};
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, DefKind, Res};
 use rustc_hir::def_id::DefId;
@@ -16,7 +16,7 @@ use rustc_span::Span;
 
 struct InteriorVisitor<'a, 'tcx> {
     fcx: &'a FnCtxt<'a, 'tcx>,
-    types: FxHashMap<ty::GeneratorInteriorTypeCause<'tcx>, usize>,
+    types: FxIndexSet<ty::GeneratorInteriorTypeCause<'tcx>>,
     region_scope_tree: &'tcx region::ScopeTree,
     expr_count: usize,
     kind: hir::GeneratorKind,
@@ -88,18 +88,15 @@ impl<'a, 'tcx> InteriorVisitor<'a, 'tcx> {
                     .span_note(yield_data.span, &*note)
                     .emit();
             } else {
-                // Map the type to the number of types added before it
-                let entries = self.types.len();
+                // Insert the type into the ordered set.
                 let scope_span = scope.map(|s| s.span(self.fcx.tcx, self.region_scope_tree));
-                self.types
-                    .entry(ty::GeneratorInteriorTypeCause {
-                        span: source_span,
-                        ty: &ty,
-                        scope_span,
-                        yield_span: yield_data.span,
-                        expr: expr.map(|e| e.hir_id),
-                    })
-                    .or_insert(entries);
+                self.types.insert(ty::GeneratorInteriorTypeCause {
+                    span: source_span,
+                    ty: &ty,
+                    scope_span,
+                    yield_span: yield_data.span,
+                    expr: expr.map(|e| e.hir_id),
+                });
             }
         } else {
             debug!(
@@ -132,7 +129,7 @@ pub fn resolve_interior<'a, 'tcx>(
     let body = fcx.tcx.hir().body(body_id);
     let mut visitor = InteriorVisitor {
         fcx,
-        types: FxHashMap::default(),
+        types: FxIndexSet::default(),
         region_scope_tree: fcx.tcx.region_scope_tree(def_id),
         expr_count: 0,
         kind,
@@ -144,10 +141,8 @@ pub fn resolve_interior<'a, 'tcx>(
     let region_expr_count = visitor.region_scope_tree.body_expr_count(body_id).unwrap();
     assert_eq!(region_expr_count, visitor.expr_count);
 
-    let mut types: Vec<_> = visitor.types.drain().collect();
-
-    // Sort types by insertion order
-    types.sort_by_key(|t| t.1);
+    // The types are already kept in insertion order.
+    let types = visitor.types;
 
     // The types in the generator interior contain lifetimes local to the generator itself,
     // which should not be exposed outside of the generator. Therefore, we replace these
@@ -164,7 +159,7 @@ pub fn resolve_interior<'a, 'tcx>(
     let mut captured_tys = FxHashSet::default();
     let type_causes: Vec<_> = types
         .into_iter()
-        .filter_map(|(mut cause, _)| {
+        .filter_map(|mut cause| {
             // Erase regions and canonicalize late-bound regions to deduplicate as many types as we
             // can.
             let erased = fcx.tcx.erase_regions(&cause.ty);
@@ -22,7 +22,7 @@ use rustc_ast::ast;
 use rustc_ast::ast::MetaItemKind;
 use rustc_attr::{list_contains_name, InlineAttr, OptimizeAttr};
 use rustc_data_structures::captures::Captures;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
 use rustc_errors::{struct_span_err, Applicability};
 use rustc_hir as hir;
 use rustc_hir::def::{CtorKind, DefKind, Res};
@@ -1718,21 +1718,17 @@ fn explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicat
 /// A data structure with unique elements, which preserves order of insertion.
 /// Preserving the order of insertion is important here so as not to break
 /// compile-fail UI tests.
-// FIXME(eddyb) just use `IndexSet` from `indexmap`.
 struct UniquePredicates<'tcx> {
-    predicates: Vec<(ty::Predicate<'tcx>, Span)>,
-    uniques: FxHashSet<(ty::Predicate<'tcx>, Span)>,
+    predicates: FxIndexSet<(ty::Predicate<'tcx>, Span)>,
 }
 
 impl<'tcx> UniquePredicates<'tcx> {
     fn new() -> Self {
-        UniquePredicates { predicates: vec![], uniques: FxHashSet::default() }
+        UniquePredicates { predicates: FxIndexSet::default() }
     }
 
     fn push(&mut self, value: (ty::Predicate<'tcx>, Span)) {
-        if self.uniques.insert(value) {
-            self.predicates.push(value);
-        }
+        self.predicates.insert(value);
     }
 
     fn extend<I: IntoIterator<Item = (ty::Predicate<'tcx>, Span)>>(&mut self, iter: I) {
@@ -2014,7 +2010,7 @@ fn explicit_predicates_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::GenericPredicat
         }))
     }
 
-    let mut predicates = predicates.predicates;
+    let mut predicates: Vec<_> = predicates.predicates.into_iter().collect();
 
     // Subtle: before we store the predicates into the tcx, we
     // sort them so that predicates like `T: Foo<Item=U>` come