Use FxHash{Map,Set} instead of the default Hash{Map,Set} everywhere in rustc.

Eduard-Mihai Burtescu 2018-08-18 13:55:43 +03:00
parent 83ddc33347
commit 93f3f5b155
34 changed files with 156 additions and 152 deletions

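The rewrite below is mechanical: every use of std's `HashMap`/`HashSet` becomes `FxHashMap`/`FxHashSet` from `rustc_data_structures::fx`, and every `new()` becomes `default()`. The Fx types are aliases for the std collections with the faster (but not DoS-resistant) `FxHasher`, and `HashMap::new()`/`HashSet::new()` are inherent methods defined only for the default `RandomState` hasher, which is why the constructors have to change too. A minimal sketch of the pattern, shown with the external `fxhash` crate standing in for rustc's internal copy:

// Sketch of the pattern this commit applies throughout rustc. The external
// `fxhash` crate is assumed here; rustc itself uses the equivalent aliases
// in `rustc_data_structures::fx`.
use fxhash::{FxHashMap, FxHashSet};

fn main() {
    // Before: HashMap::new() / HashSet::new().
    // After: `new()` exists only for the RandomState hasher, so the Fx
    // aliases are constructed through `Default` instead.
    let mut map: FxHashMap<&str, u32> = FxHashMap::default();
    let mut set: FxHashSet<&str> = FxHashSet::default();

    map.insert("target_os", 1);
    set.insert("unix");

    // The rest of the API is unchanged: the aliases expand to
    // `HashMap<K, V, BuildHasherDefault<FxHasher>>` and the set equivalent.
    assert_eq!(map.get("target_os"), Some(&1));
    assert!(set.contains("unix"));
}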
View File

@ -50,6 +50,7 @@ use hir::GenericArg;
use lint::builtin::{self, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
ELIDED_LIFETIMES_IN_PATHS};
use middle::cstore::CrateStore;
+ use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::small_vec::OneVector;
use rustc_data_structures::thin_vec::ThinVec;
@ -57,7 +58,7 @@ use session::Session;
use util::common::FN_OUTPUT_NAME;
use util::nodemap::{DefIdMap, NodeMap};
- use std::collections::{BTreeMap, HashSet};
+ use std::collections::BTreeMap;
use std::fmt::Debug;
use std::iter;
use std::mem;
@ -1342,7 +1343,7 @@ impl<'a> LoweringContext<'a> {
exist_ty_id: NodeId,
collect_elided_lifetimes: bool,
currently_bound_lifetimes: Vec<hir::LifetimeName>,
- already_defined_lifetimes: HashSet<hir::LifetimeName>,
+ already_defined_lifetimes: FxHashSet<hir::LifetimeName>,
output_lifetimes: Vec<hir::GenericArg>,
output_lifetime_params: Vec<hir::GenericParam>,
}
@ -1476,7 +1477,7 @@ impl<'a> LoweringContext<'a> {
exist_ty_id,
collect_elided_lifetimes: true,
currently_bound_lifetimes: Vec::new(),
- already_defined_lifetimes: HashSet::new(),
+ already_defined_lifetimes: FxHashSet::default(),
output_lifetimes: Vec::new(),
output_lifetime_params: Vec::new(),
};

View File

@ -13,6 +13,7 @@
use session::config;
use middle::lang_items;
+ use rustc_data_structures::fx::FxHashSet;
use rustc_target::spec::PanicStrategy;
use syntax::ast;
use syntax::symbol::Symbol;
@ -23,8 +24,6 @@ use hir::intravisit;
use hir;
use ty::TyCtxt;
- use std::collections::HashSet;
macro_rules! weak_lang_items {
($($name:ident, $item:ident, $sym:ident;)*) => (
@ -101,7 +100,7 @@ fn verify<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
return
}
- let mut missing = HashSet::new();
+ let mut missing = FxHashSet::default();
for &cnum in tcx.crates().iter() {
for &item in tcx.missing_lang_items(cnum).iter() {
missing.insert(item);

View File

@ -37,10 +37,10 @@ use std::collections::btree_map::Iter as BTreeMapIter;
use std::collections::btree_map::Keys as BTreeMapKeysIter;
use std::collections::btree_map::Values as BTreeMapValuesIter;
+ use rustc_data_structures::fx::FxHashSet;
use std::{fmt, str};
use std::hash::Hasher;
use std::collections::hash_map::DefaultHasher;
- use std::collections::HashSet;
use std::iter::FromIterator;
use std::path::{Path, PathBuf};
@ -1373,7 +1373,7 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
let max_atomic_width = sess.target.target.max_atomic_width();
let atomic_cas = sess.target.target.options.atomic_cas;
- let mut ret = HashSet::new();
+ let mut ret = FxHashSet::default();
// Target bindings.
ret.insert((Symbol::intern("target_os"), Some(Symbol::intern(os))));
if let Some(ref fam) = sess.target.target.options.target_family {

View File

@ -12,8 +12,8 @@
pub use self::FileMatch::*;
+ use rustc_data_structures::fx::FxHashSet;
use std::borrow::Cow;
- use std::collections::HashSet;
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
@ -40,7 +40,7 @@ impl<'a> FileSearch<'a> {
pub fn for_each_lib_search_path<F>(&self, mut f: F) where
F: FnMut(&Path, PathKind)
{
- let mut visited_dirs = HashSet::new();
+ let mut visited_dirs = FxHashSet::default();
for (path, kind) in self.search_paths.iter(self.kind) {
f(path, kind);

View File

@ -47,7 +47,6 @@ use jobserver::Client;
use std;
use std::cell::{self, Cell, RefCell};
- use std::collections::HashMap;
use std::env;
use std::fmt;
use std::io::Write;
@ -122,7 +121,7 @@ pub struct Session {
/// Map from imported macro spans (which consist of
/// the localized span for the macro body) to the
/// macro name and definition span in the source crate.
- pub imported_macro_spans: OneThread<RefCell<HashMap<Span, (String, Span)>>>,
+ pub imported_macro_spans: OneThread<RefCell<FxHashMap<Span, (String, Span)>>>,
incr_comp_session: OneThread<RefCell<IncrCompSession>>,
@ -1129,7 +1128,7 @@ pub fn build_session_(
injected_allocator: Once::new(),
allocator_kind: Once::new(),
injected_panic_runtime: Once::new(),
- imported_macro_spans: OneThread::new(RefCell::new(HashMap::new())),
+ imported_macro_spans: OneThread::new(RefCell::new(FxHashMap::default())),
incr_comp_session: OneThread::new(RefCell::new(IncrCompSession::NotInitialized)),
self_profiling: Lock::new(SelfProfiler::new()),
profile_channel: Lock::new(None),

View File

@ -11,6 +11,7 @@
#![allow(warnings)]
use std::mem;
+ use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::{Lock, LockGuard, Lrc, Weak};
use rustc_data_structures::OnDrop;
use syntax_pos::Span;
@ -21,7 +22,7 @@ use ty::context::TyCtxt;
use errors::Diagnostic;
use std::process;
use std::{fmt, ptr};
- use std::collections::HashSet;
#[cfg(parallel_queries)]
use {
rayon_core,
@ -282,7 +283,7 @@ where
fn cycle_check<'tcx>(query: Lrc<QueryJob<'tcx>>,
span: Span,
stack: &mut Vec<(Span, Lrc<QueryJob<'tcx>>)>,
- visited: &mut HashSet<*const QueryJob<'tcx>>
+ visited: &mut FxHashSet<*const QueryJob<'tcx>>
) -> Option<Option<Waiter<'tcx>>> {
if visited.contains(&query.as_ptr()) {
return if let Some(p) = stack.iter().position(|q| q.1.as_ptr() == query.as_ptr()) {
@ -321,7 +322,7 @@ fn cycle_check<'tcx>(query: Lrc<QueryJob<'tcx>>,
#[cfg(parallel_queries)]
fn connected_to_root<'tcx>(
query: Lrc<QueryJob<'tcx>>,
- visited: &mut HashSet<*const QueryJob<'tcx>>
+ visited: &mut FxHashSet<*const QueryJob<'tcx>>
) -> bool {
// We already visited this or we're deliberately ignoring it
if visited.contains(&query.as_ptr()) {
@ -357,7 +358,7 @@ fn remove_cycle<'tcx>(
wakelist: &mut Vec<Lrc<QueryWaiter<'tcx>>>,
tcx: TyCtxt<'_, 'tcx, '_>
) -> bool {
- let mut visited = HashSet::new();
+ let mut visited = FxHashSet::default();
let mut stack = Vec::new();
// Look for a cycle starting with the last query in `jobs`
if let Some(waiter) = cycle_check(jobs.pop().unwrap(),
@ -389,7 +390,7 @@ fn remove_cycle<'tcx>(
// connected to queries outside the cycle
let entry_points: Vec<Lrc<QueryJob<'tcx>>> = stack.iter().filter_map(|query| {
// Mark all the other queries in the cycle as already visited
- let mut visited = HashSet::from_iter(stack.iter().filter_map(|q| {
+ let mut visited = FxHashSet::from_iter(stack.iter().filter_map(|q| {
if q.1.as_ptr() != query.1.as_ptr() {
Some(q.1.as_ptr())
} else {

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
- use std::collections::HashMap;
+ use rustc_data_structures::fx::FxHashMap;
use std::fs::File;
use std::io::prelude::*;
use std::marker::PhantomData;
@ -40,7 +40,7 @@ struct PerThread {
#[derive(Clone)]
pub struct TimeGraph {
- data: Arc<Mutex<HashMap<TimelineId, PerThread>>>,
+ data: Arc<Mutex<FxHashMap<TimelineId, PerThread>>>,
}
#[derive(Clone, Copy)]
@ -68,7 +68,7 @@ impl Drop for RaiiToken {
impl TimeGraph {
pub fn new() -> TimeGraph {
TimeGraph {
- data: Arc::new(Mutex::new(HashMap::new()))
+ data: Arc::new(Mutex::new(FxHashMap::default()))
}
}

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
- use std::collections::HashMap;
+ use rustc_data_structures::fx::FxHashMap;
use std::ffi::{OsStr, OsString};
use std::fs::{self, File};
use std::io::prelude::*;
@ -30,7 +30,7 @@ use serialize::{json, Encoder};
/// For all the linkers we support, and information they might
/// need out of the shared crate context before we get rid of it.
pub struct LinkerInfo {
- exports: HashMap<CrateType, Vec<String>>,
+ exports: FxHashMap<CrateType, Vec<String>>,
}
impl LinkerInfo {

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
- use std::collections::HashSet;
+ use rustc_data_structures::fx::FxHashSet;
use std::env;
use std::path::{Path, PathBuf};
use std::fs;
@ -172,7 +172,7 @@ fn get_install_prefix_rpath(config: &mut RPathConfig) -> String {
}
fn minimize_rpaths(rpaths: &[String]) -> Vec<String> {
- let mut set = HashSet::new();
+ let mut set = FxHashSet::default();
let mut minimized = Vec::new();
for rpath in rpaths {
if set.insert(rpath) {

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
- use std::collections::HashMap;
+ use fx::FxHashMap;
use std::cmp::max;
use std::slice;
use std::iter;
@ -18,8 +18,8 @@ use super::*;
pub struct TestGraph {
num_nodes: usize,
start_node: usize,
- successors: HashMap<usize, Vec<usize>>,
- predecessors: HashMap<usize, Vec<usize>>,
+ successors: FxHashMap<usize, Vec<usize>>,
+ predecessors: FxHashMap<usize, Vec<usize>>,
}
impl TestGraph {
@ -27,8 +27,8 @@ impl TestGraph {
let mut graph = TestGraph {
num_nodes: start_node + 1,
start_node,
- successors: HashMap::new(),
- predecessors: HashMap::new(),
+ successors: FxHashMap::default(),
+ predecessors: FxHashMap::default(),
};
for &(source, target) in edges {
graph.num_nodes = max(graph.num_nodes, source + 1);

View File

@ -10,10 +10,10 @@
use super::*;
use syntax_pos::SpanData;
+ use rustc_data_structures::fx::FxHashMap;
use rustc::util::common::QueryMsg;
use std::fs::File;
use std::time::{Duration, Instant};
- use std::collections::hash_map::HashMap;
use rustc::dep_graph::{DepNode};
#[derive(Debug, Clone, Eq, PartialEq)]
@ -149,7 +149,7 @@ fn write_traces_rec(file: &mut File, traces: &[Rec], total: Duration, depth: usi
}
}
- fn compute_counts_rec(counts: &mut HashMap<String,QueryMetric>, traces: &[Rec]) {
+ fn compute_counts_rec(counts: &mut FxHashMap<String,QueryMetric>, traces: &[Rec]) {
for t in traces.iter() {
match t.effect {
Effect::TimeBegin(ref msg) => {
@ -200,7 +200,7 @@ fn compute_counts_rec(counts: &mut HashMap<String,QueryMetric>, traces: &[Rec])
}
}
- pub fn write_counts(count_file: &mut File, counts: &mut HashMap<String,QueryMetric>) {
+ pub fn write_counts(count_file: &mut File, counts: &mut FxHashMap<String,QueryMetric>) {
use rustc::util::common::duration_to_secs_str;
use std::cmp::Reverse;
@ -219,7 +219,7 @@ pub fn write_counts(count_file: &mut File, counts: &mut HashMap<String,QueryMetr
pub fn write_traces(html_file: &mut File, counts_file: &mut File, traces: &[Rec]) {
let capacity = traces.iter().fold(0, |acc, t| acc + 1 + t.extent.len());
- let mut counts : HashMap<String, QueryMetric> = HashMap::with_capacity(capacity);
+ let mut counts = FxHashMap::with_capacity_and_hasher(capacity, Default::default());
compute_counts_rec(&mut counts, traces);
write_counts(counts_file, &mut counts);

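A side effect visible in the hunk above: `HashMap::with_capacity(n)` is also defined only for the `RandomState` hasher, so pre-sized Fx maps are built with `with_capacity_and_hasher`. A small sketch of that equivalence, again assuming the external `fxhash` crate:

use fxhash::FxHashMap;

// Pre-sizing with a non-default hasher: capacity and hasher are supplied
// together. `BuildHasherDefault<FxHasher>` is zero-sized, so passing
// `Default::default()` costs nothing.
fn presized_counts(capacity: usize) -> FxHashMap<String, u64> {
    FxHashMap::with_capacity_and_hasher(capacity, Default::default())
}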
View File

@ -16,12 +16,12 @@ use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, SourceMapperDyn, D
use snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style};
use styled_buffer::StyledBuffer;
+ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use atty;
use std::borrow::Cow;
use std::io::prelude::*;
use std::io;
- use std::collections::HashMap;
use std::cmp::{min, Reverse};
use termcolor::{StandardStream, ColorChoice, ColorSpec, BufferWriter};
use termcolor::{WriteColor, Color, Buffer};
@ -1090,7 +1090,7 @@ impl EmitterWriter {
max_line_num_len + 1);
// Contains the vertical lines' positions for active multiline annotations
- let mut multilines = HashMap::new();
+ let mut multilines = FxHashMap::default();
// Next, output the annotate source for this file
for line_idx in 0..annotated_file.lines.len() {
@ -1109,7 +1109,7 @@ impl EmitterWriter {
width_offset,
code_offset);
- let mut to_add = HashMap::new();
+ let mut to_add = FxHashMap::default();
for (depth, style) in depths {
if multilines.get(&depth).is_some() {

View File

@ -8,11 +8,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
- use std::collections::HashMap;
+ use rustc_data_structures::fx::FxHashMap;
#[derive(Clone)]
pub struct Registry {
- descriptions: HashMap<&'static str, &'static str>,
+ descriptions: FxHashMap<&'static str, &'static str>,
}
impl Registry {

View File

@ -24,7 +24,6 @@
//! the required condition is not met.
//!
- use std::collections::HashSet;
use std::iter::FromIterator;
use std::vec::Vec;
use rustc::dep_graph::{DepNode, label_strs};
@ -193,7 +192,7 @@ const LABELS_TRAIT: &[&[&str]] = &[
//
// TypeOfItem for these.
- type Labels = HashSet<String>;
+ type Labels = FxHashSet<String>;
/// Represents the requested configuration by rustc_clean/dirty
struct Assertion {
@ -205,13 +204,13 @@ impl Assertion {
fn from_clean_labels(labels: Labels) -> Assertion {
Assertion {
clean: labels,
- dirty: Labels::new(),
+ dirty: Labels::default(),
}
}
fn from_dirty_labels(labels: Labels) -> Assertion {
Assertion {
- clean: Labels::new(),
+ clean: Labels::default(),
dirty: labels,
}
}
@ -328,7 +327,7 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
}
}
// if no `label` or `except` is given, only the node's group are asserted
- Labels::new()
+ Labels::default()
}
/// Return all DepNode labels that should be asserted for this item.
@ -436,7 +435,7 @@ impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
}
fn resolve_labels(&self, item: &NestedMetaItem, value: &str) -> Labels {
- let mut out: Labels = HashSet::new();
+ let mut out = Labels::default();
for label in value.split(',') {
let label = label.trim();
if DepNode::has_label_string(label) {

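The `Labels` hunks above show the same constraint through a type alias: once `Labels` aliases `FxHashSet<String>`, `Labels::new()` no longer resolves, while `Labels::default()` works for any hasher that itself implements `Default`. A standalone sketch using the external `fxhash` crate:

use fxhash::FxHashSet;

type Labels = FxHashSet<String>;

fn empty_labels() -> Labels {
    // `Labels::new()` would fail to compile: `HashSet::new()` is an
    // inherent method defined only for `HashSet<T, RandomState>`.
    // `Default` is hasher-generic, so it works through the alias.
    Labels::default()
}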
View File

@ -39,7 +39,6 @@ use util::nodemap::NodeSet;
use lint::{LateContext, LintContext, LintArray};
use lint::{LintPass, LateLintPass, EarlyLintPass, EarlyContext};
- use std::collections::HashSet;
+ use rustc::util::nodemap::FxHashSet;
use syntax::tokenstream::{TokenTree, TokenStream};
@ -304,14 +303,14 @@ pub struct MissingDoc {
doc_hidden_stack: Vec<bool>,
/// Private traits or trait items that leaked through. Don't check their methods.
- private_traits: HashSet<ast::NodeId>,
+ private_traits: FxHashSet<ast::NodeId>,
}
impl MissingDoc {
pub fn new() -> MissingDoc {
MissingDoc {
doc_hidden_stack: vec![false],
- private_traits: HashSet::new(),
+ private_traits: FxHashSet::default(),
}
}
@ -908,7 +907,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnconditionalRecursion {
let mut work_queue = vec![cfg.entry];
let mut reached_exit_without_self_call = false;
let mut self_call_spans = vec![];
- let mut visited = HashSet::new();
+ let mut visited = FxHashSet::default();
while let Some(idx) = work_queue.pop() {
if idx == cfg.exit {

View File

@ -226,6 +226,7 @@ use cstore::{MetadataRef, MetadataBlob};
use creader::Library;
use schema::{METADATA_HEADER, rustc_version};
+ use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::svh::Svh;
use rustc::middle::cstore::MetadataLoader;
use rustc::session::{config, Session};
@ -239,7 +240,6 @@ use syntax_pos::Span;
use rustc_target::spec::{Target, TargetTriple};
use std::cmp;
- use std::collections::HashSet;
use std::fmt;
use std::fs;
use std::io::{self, Read};
@ -308,7 +308,7 @@ impl CratePaths {
impl<'a> Context<'a> {
pub fn maybe_load_library_crate(&mut self) -> Option<Library> {
- let mut seen_paths = HashSet::new();
+ let mut seen_paths = FxHashSet::default();
match self.extra_filename {
Some(s) => self.find_library_crate(s, &mut seen_paths)
.or_else(|| self.find_library_crate("", &mut seen_paths)),
@ -431,7 +431,7 @@ impl<'a> Context<'a> {
fn find_library_crate(&mut self,
extra_prefix: &str,
- seen_paths: &mut HashSet<PathBuf>)
+ seen_paths: &mut FxHashSet<PathBuf>)
-> Option<Library> {
// If an SVH is specified, then this is a transitive dependency that
// must be loaded via -L plus some filtering.

View File

@ -67,9 +67,9 @@ use rustc::ty::{self, TyCtxt, AdtDef, Ty};
use rustc::ty::subst::Substs;
use util::dump_mir;
use util::liveness::{self, IdentityMap};
+ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::indexed_set::IdxSet;
- use std::collections::HashMap;
use std::borrow::Cow;
use std::iter::once;
use std::mem;
@ -142,10 +142,12 @@ struct TransformVisitor<'a, 'tcx: 'a> {
state_field: usize,
// Mapping from Local to (type of local, generator struct index)
- remap: HashMap<Local, (Ty<'tcx>, usize)>,
+ // FIXME(eddyb) This should use `IndexVec<Local, Option<_>>`.
+ remap: FxHashMap<Local, (Ty<'tcx>, usize)>,
// A map from a suspension point in a block to the locals which have live storage at that point
- storage_liveness: HashMap<BasicBlock, liveness::LiveVarSet<Local>>,
+ // FIXME(eddyb) This should use `IndexVec<BasicBlock, Option<_>>`.
+ storage_liveness: FxHashMap<BasicBlock, liveness::LiveVarSet<Local>>,
// A list of suspension points, generated during the transform
suspension_points: Vec<SuspensionPoint>,
@ -364,12 +366,15 @@ impl<'tcx> Visitor<'tcx> for BorrowedLocals {
}
}
- fn locals_live_across_suspend_points<'a, 'tcx,>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-     mir: &Mir<'tcx>,
-     source: MirSource,
-     movable: bool) ->
-     (liveness::LiveVarSet<Local>,
-     HashMap<BasicBlock, liveness::LiveVarSet<Local>>) {
+ fn locals_live_across_suspend_points<'a, 'tcx,>(
+     tcx: TyCtxt<'a, 'tcx, 'tcx>,
+     mir: &Mir<'tcx>,
+     source: MirSource,
+     movable: bool,
+ ) -> (
+     liveness::LiveVarSet<Local>,
+     FxHashMap<BasicBlock, liveness::LiveVarSet<Local>>,
+ ) {
let dead_unwinds = IdxSet::new_empty(mir.basic_blocks().len());
let node_id = tcx.hir.as_local_node_id(source.def_id).unwrap();
@ -413,7 +418,7 @@ fn locals_live_across_suspend_points<'a, 'tcx,>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
&liveness,
);
- let mut storage_liveness_map = HashMap::new();
+ let mut storage_liveness_map = FxHashMap::default();
for (block, data) in mir.basic_blocks().iter_enumerated() {
if let TerminatorKind::Yield { .. } = data.terminator().kind {
@ -477,9 +482,9 @@ fn compute_layout<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
interior: Ty<'tcx>,
movable: bool,
mir: &mut Mir<'tcx>)
- -> (HashMap<Local, (Ty<'tcx>, usize)>,
+ -> (FxHashMap<Local, (Ty<'tcx>, usize)>,
GeneratorLayout<'tcx>,
- HashMap<BasicBlock, liveness::LiveVarSet<Local>>)
+ FxHashMap<BasicBlock, liveness::LiveVarSet<Local>>)
{
// Use a liveness analysis to compute locals which are live across a suspension point
let (live_locals, storage_liveness) = locals_live_across_suspend_points(tcx,

View File

@ -12,6 +12,7 @@
use rustc::lint::{EarlyLintPassObject, LateLintPassObject, LintId, Lint};
use rustc::session::Session;
+ use rustc::util::nodemap::FxHashMap;
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
use syntax::ext::base::MacroExpanderFn;
@ -21,7 +22,6 @@ use syntax::ast;
use syntax::feature_gate::AttributeType;
use syntax_pos::Span;
- use std::collections::HashMap;
use std::borrow::ToOwned;
/// Structure used to register plugins.
@ -53,7 +53,7 @@ pub struct Registry<'a> {
pub late_lint_passes: Vec<LateLintPassObject>,
#[doc(hidden)]
- pub lint_groups: HashMap<&'static str, Vec<LintId>>,
+ pub lint_groups: FxHashMap<&'static str, Vec<LintId>>,
#[doc(hidden)]
pub llvm_passes: Vec<String>,
@ -74,7 +74,7 @@ impl<'a> Registry<'a> {
syntax_exts: vec![],
early_lint_passes: vec![],
late_lint_passes: vec![],
- lint_groups: HashMap::new(),
+ lint_groups: FxHashMap::default(),
llvm_passes: vec![],
attributes: vec![],
whitelisted_custom_derives: Vec::new(),

View File

@ -91,7 +91,7 @@ pub struct DumpVisitor<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> {
// of macro use (callsite) spans. We store these to ensure
// we only write one macro def per unique macro definition, and
// one macro use per unique callsite span.
- // mac_defs: HashSet<Span>,
+ // mac_defs: FxHashSet<Span>,
macro_calls: FxHashSet<Span>,
}
@ -107,7 +107,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> {
dumper,
span: span_utils.clone(),
cur_scope: CRATE_NODE_ID,
- // mac_defs: HashSet::new(),
+ // mac_defs: FxHashSet::default(),
macro_calls: FxHashSet(),
}
}

View File

@ -29,8 +29,9 @@
#![allow(non_camel_case_types)]
+ use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell;
- use std::collections::{HashMap, VecDeque};
+ use std::collections::VecDeque;
use std::default::Default;
use std::fmt::{self, Write};
use std::borrow::Cow;
@ -417,14 +418,14 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
/// references.
struct Footnotes<'a, I: Iterator<Item = Event<'a>>> {
inner: I,
- footnotes: HashMap<String, (Vec<Event<'a>>, u16)>,
+ footnotes: FxHashMap<String, (Vec<Event<'a>>, u16)>,
}
impl<'a, I: Iterator<Item = Event<'a>>> Footnotes<'a, I> {
fn new(iter: I) -> Self {
Footnotes {
inner: iter,
- footnotes: HashMap::new(),
+ footnotes: FxHashMap::default(),
}
}
fn get_entry(&mut self, key: &str) -> &mut (Vec<Event<'a>>, u16) {
@ -865,7 +866,7 @@ pub fn markdown_links(md: &str) -> Vec<(String, Option<Range<usize>>)> {
#[derive(Default)]
pub struct IdMap {
- map: HashMap<String, usize>,
+ map: FxHashMap<String, usize>,
}
impl IdMap {
@ -880,7 +881,7 @@ impl IdMap {
}
pub fn reset(&mut self) {
- self.map = HashMap::new();
+ self.map = FxHashMap::default();
}
pub fn derive(&mut self, candidate: String) -> String {

View File

@ -38,7 +38,7 @@ pub use self::ExternalLocation::*;
use std::borrow::Cow;
use std::cell::RefCell;
use std::cmp::Ordering;
- use std::collections::{BTreeMap, HashSet, VecDeque};
+ use std::collections::{BTreeMap, VecDeque};
use std::default::Default;
use std::error;
use std::fmt::{self, Display, Formatter, Write as FmtWrite};
@ -741,7 +741,7 @@ fn write_shared(cx: &Context,
// To avoid "light.css" to be overwritten, we'll first run over the received themes and only
// then we'll run over the "official" styles.
- let mut themes: HashSet<String> = HashSet::new();
+ let mut themes: FxHashSet<String> = FxHashSet::default();
for entry in &cx.shared.themes {
let mut content = Vec::with_capacity(100000);
@ -1539,35 +1539,36 @@ impl Ord for ItemEntry {
#[derive(Debug)]
struct AllTypes {
- structs: HashSet<ItemEntry>,
- enums: HashSet<ItemEntry>,
- unions: HashSet<ItemEntry>,
- primitives: HashSet<ItemEntry>,
- traits: HashSet<ItemEntry>,
- macros: HashSet<ItemEntry>,
- functions: HashSet<ItemEntry>,
- typedefs: HashSet<ItemEntry>,
- existentials: HashSet<ItemEntry>,
- statics: HashSet<ItemEntry>,
- constants: HashSet<ItemEntry>,
- keywords: HashSet<ItemEntry>,
+ structs: FxHashSet<ItemEntry>,
+ enums: FxHashSet<ItemEntry>,
+ unions: FxHashSet<ItemEntry>,
+ primitives: FxHashSet<ItemEntry>,
+ traits: FxHashSet<ItemEntry>,
+ macros: FxHashSet<ItemEntry>,
+ functions: FxHashSet<ItemEntry>,
+ typedefs: FxHashSet<ItemEntry>,
+ existentials: FxHashSet<ItemEntry>,
+ statics: FxHashSet<ItemEntry>,
+ constants: FxHashSet<ItemEntry>,
+ keywords: FxHashSet<ItemEntry>,
}
impl AllTypes {
fn new() -> AllTypes {
+ let new_set = |cap| FxHashSet::with_capacity_and_hasher(cap, Default::default());
AllTypes {
- structs: HashSet::with_capacity(100),
- enums: HashSet::with_capacity(100),
- unions: HashSet::with_capacity(100),
- primitives: HashSet::with_capacity(26),
- traits: HashSet::with_capacity(100),
- macros: HashSet::with_capacity(100),
- functions: HashSet::with_capacity(100),
- typedefs: HashSet::with_capacity(100),
- existentials: HashSet::with_capacity(100),
- statics: HashSet::with_capacity(100),
- constants: HashSet::with_capacity(100),
- keywords: HashSet::with_capacity(100),
+ structs: new_set(100),
+ enums: new_set(100),
+ unions: new_set(100),
+ primitives: new_set(26),
+ traits: new_set(100),
+ macros: new_set(100),
+ functions: new_set(100),
+ typedefs: new_set(100),
+ existentials: new_set(100),
+ statics: new_set(100),
+ constants: new_set(100),
+ keywords: new_set(100),
}
}
@ -1595,7 +1596,7 @@ impl AllTypes {
}
}
- fn print_entries(f: &mut fmt::Formatter, e: &HashSet<ItemEntry>, title: &str,
+ fn print_entries(f: &mut fmt::Formatter, e: &FxHashSet<ItemEntry>, title: &str,
class: &str) -> fmt::Result {
if !e.is_empty() {
let mut e: Vec<&ItemEntry> = e.iter().collect();
@ -4185,7 +4186,7 @@ fn sidebar_assoc_items(it: &clean::Item) -> String {
}
}
let format_impls = |impls: Vec<&Impl>| {
- let mut links = HashSet::new();
+ let mut links = FxHashSet::default();
impls.iter()
.filter_map(|i| {
let is_negative_impl = is_negative_impl(i.inner_impl());

View File

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
- use std::collections::HashSet;
+ use rustc_data_structures::fx::FxHashSet;
use std::fs::File;
use std::hash::{Hash, Hasher};
use std::io::Read;
@ -31,7 +31,7 @@ macro_rules! try_something {
#[derive(Debug, Clone, Eq)]
pub struct CssPath {
pub name: String,
- pub children: HashSet<CssPath>,
+ pub children: FxHashSet<CssPath>,
}
// This PartialEq implementation IS NOT COMMUTATIVE!!!
@ -66,7 +66,7 @@ impl CssPath {
fn new(name: String) -> CssPath {
CssPath {
name,
- children: HashSet::new(),
+ children: FxHashSet::default(),
}
}
}
@ -211,7 +211,7 @@ fn build_rule(v: &[u8], positions: &[usize]) -> String {
.join(" ")
}
- fn inner(v: &[u8], events: &[Events], pos: &mut usize) -> HashSet<CssPath> {
+ fn inner(v: &[u8], events: &[Events], pos: &mut usize) -> FxHashSet<CssPath> {
let mut paths = Vec::with_capacity(50);
while *pos < events.len() {

View File

@ -28,8 +28,8 @@ use ThinVec;
use tokenstream::{ThinTokenStream, TokenStream};
use serialize::{self, Encoder, Decoder};
- use std::collections::HashSet;
use std::fmt;
+ use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::Lrc;
use std::u32;
@ -407,7 +407,7 @@ pub struct WhereEqPredicate {
/// The set of MetaItems that define the compilation environment of the crate,
/// used to drive conditional compilation
- pub type CrateConfig = HashSet<(Name, Option<Symbol>)>;
+ pub type CrateConfig = FxHashSet<(Name, Option<Symbol>)>;
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Crate {

View File

@ -26,7 +26,7 @@ use OneVector;
use symbol::{keywords, Ident, Symbol};
use ThinVec;
- use std::collections::HashMap;
+ use rustc_data_structures::fx::FxHashMap;
use std::iter;
use std::path::PathBuf;
use std::rc::Rc;
@ -800,7 +800,7 @@ pub struct ExtCtxt<'a> {
pub resolver: &'a mut dyn Resolver,
pub resolve_err_count: usize,
pub current_expansion: ExpansionData,
- pub expansions: HashMap<Span, Vec<String>>,
+ pub expansions: FxHashMap<Span, Vec<String>>,
}
impl<'a> ExtCtxt<'a> {
@ -821,7 +821,7 @@ impl<'a> ExtCtxt<'a> {
directory_ownership: DirectoryOwnership::Owned { relative: None },
crate_span: None,
},
- expansions: HashMap::new(),
+ expansions: FxHashMap::default(),
}
}

View File

@ -17,7 +17,7 @@ use parse::parser::PathStyle;
use symbol::Symbol;
use syntax_pos::Span;
- use std::collections::HashSet;
+ use rustc_data_structures::fx::FxHashSet;
pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
let mut result = Vec::new();
@ -48,7 +48,7 @@ pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec<ast::Attribute>) -> Vec
pub fn add_derived_markers<T>(cx: &mut ExtCtxt, span: Span, traits: &[ast::Path], item: T) -> T
where T: HasAttrs,
{
let (mut names, mut pretty_name) = (HashSet::new(), "derive(".to_owned());
let (mut names, mut pretty_name) = (FxHashSet::default(), "derive(".to_owned());
for (i, path) in traits.iter().enumerate() {
if i > 0 {
pretty_name.push_str(", ");

View File

@ -34,7 +34,7 @@ use syntax_pos::hygiene::ExpnFormat;
use tokenstream::{TokenStream, TokenTree};
use visit::{self, Visitor};
- use std::collections::HashMap;
+ use rustc_data_structures::fx::FxHashMap;
use std::fs::File;
use std::io::Read;
use std::iter::FromIterator;
@ -319,7 +319,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
// Unresolved macros produce dummy outputs as a recovery measure.
invocations.reverse();
let mut expanded_fragments = Vec::new();
- let mut derives: HashMap<Mark, Vec<_>> = HashMap::new();
+ let mut derives: FxHashMap<Mark, Vec<_>> = FxHashMap::default();
let mut undetermined_invocations = Vec::new();
let (mut progress, mut force) = (false, !self.monotonic);
loop {

View File

@ -21,7 +21,7 @@ use symbol::keywords;
use ThinVec;
use util::move_map::MoveMap;
- use std::collections::HashMap;
+ use rustc_data_structures::fx::FxHashMap;
pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
fn mac_placeholder() -> ast::Mac {
@ -81,7 +81,7 @@ pub fn placeholder(kind: AstFragmentKind, id: ast::NodeId) -> AstFragment {
}
pub struct PlaceholderExpander<'a, 'b: 'a> {
- expanded_fragments: HashMap<ast::NodeId, AstFragment>,
+ expanded_fragments: FxHashMap<ast::NodeId, AstFragment>,
cx: &'a mut ExtCtxt<'b>,
monotonic: bool,
}
@ -90,7 +90,7 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> {
pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
PlaceholderExpander {
cx,
- expanded_fragments: HashMap::new(),
+ expanded_fragments: FxHashMap::default(),
monotonic,
}
}

View File

@ -96,11 +96,11 @@ use OneVector;
use symbol::keywords;
use tokenstream::TokenStream;
+ use rustc_data_structures::fx::FxHashMap;
+ use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::mem;
use std::ops::{Deref, DerefMut};
use std::rc::Rc;
- use std::collections::HashMap;
- use std::collections::hash_map::Entry::{Occupied, Vacant};
// To avoid costly uniqueness checks, we require that `MatchSeq` always has a nonempty body.
@ -263,7 +263,7 @@ pub enum ParseResult<T> {
/// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
/// This represents the mapping of metavars to the token trees they bind to.
- pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
+ pub type NamedParseResult = ParseResult<FxHashMap<Ident, Rc<NamedMatch>>>;
/// Count how many metavars are named in the given matcher `ms`.
pub fn count_names(ms: &[TokenTree]) -> usize {
@ -351,7 +351,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
sess: &ParseSess,
m: &TokenTree,
res: &mut I,
- ret_val: &mut HashMap<Ident, Rc<NamedMatch>>,
+ ret_val: &mut FxHashMap<Ident, Rc<NamedMatch>>,
) -> Result<(), (syntax_pos::Span, String)> {
match *m {
TokenTree::Sequence(_, ref seq) => for next_m in &seq.tts {
@ -382,7 +382,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
Ok(())
}
- let mut ret_val = HashMap::new();
+ let mut ret_val = FxHashMap::default();
for m in ms {
match n_rec(sess, m, res.by_ref(), &mut ret_val) {
Ok(_) => {}

View File

@ -27,8 +27,8 @@ use parse::token::Token::*;
use symbol::Symbol;
use tokenstream::{TokenStream, TokenTree};
+ use rustc_data_structures::fx::FxHashMap;
use std::borrow::Cow;
- use std::collections::HashMap;
use std::collections::hash_map::Entry;
use rustc_data_structures::sync::Lrc;
@ -451,14 +451,14 @@ struct FirstSets {
// If two sequences have the same span in a matcher, then map that
// span to None (invalidating the mapping here and forcing the code to
// use a slow path).
- first: HashMap<Span, Option<TokenSet>>,
+ first: FxHashMap<Span, Option<TokenSet>>,
}
impl FirstSets {
fn new(tts: &[quoted::TokenTree]) -> FirstSets {
use self::quoted::TokenTree;
- let mut sets = FirstSets { first: HashMap::new() };
+ let mut sets = FirstSets { first: FxHashMap::default() };
build_recur(&mut sets, tts);
return sets;

View File

@ -19,11 +19,11 @@ use OneVector;
use syntax_pos::{Span, DUMMY_SP};
use tokenstream::{TokenStream, TokenTree, Delimited};
+ use std::rc::Rc;
+ use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use std::mem;
use std::ops::Add;
- use std::collections::HashMap;
- use std::rc::Rc;
// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame {
@ -67,11 +67,11 @@ impl Iterator for Frame {
/// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can
/// (and should) be None.
pub fn transcribe(cx: &ExtCtxt,
- interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
+ interp: Option<FxHashMap<Ident, Rc<NamedMatch>>>,
src: Vec<quoted::TokenTree>)
-> TokenStream {
let mut stack: OneVector<Frame> = smallvec![Frame::new(src)];
- let interpolations = interp.unwrap_or_else(HashMap::new); /* just a convenience */
+ let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
let mut repeats = Vec::new();
let mut result: Vec<TokenStream> = Vec::new();
let mut result_stack = Vec::new();
@ -187,7 +187,7 @@ pub fn transcribe(cx: &ExtCtxt,
}
fn lookup_cur_matched(ident: Ident,
- interpolations: &HashMap<Ident, Rc<NamedMatch>>,
+ interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
repeats: &[(usize, usize)])
-> Option<Rc<NamedMatch>> {
interpolations.get(&ident).map(|matched| {
@ -234,7 +234,7 @@ impl Add for LockstepIterSize {
}
fn lockstep_iter_size(tree: &quoted::TokenTree,
- interpolations: &HashMap<Ident, Rc<NamedMatch>>,
+ interpolations: &FxHashMap<Ident, Rc<NamedMatch>>,
repeats: &[(usize, usize)])
-> LockstepIterSize {
use self::quoted::TokenTree;

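One detail from the transcribe hunk above: `HashMap::new` was being passed to `unwrap_or_else` as a bare function value, and `FxHashMap::default` substitutes for it directly, since `Default::default` is likewise a zero-argument associated function. A minimal sketch (names here are illustrative, using the external `fxhash` crate):

use fxhash::FxHashMap;

// `FxHashMap::default` is a zero-argument fn item, so it slots into
// `unwrap_or_else` exactly where `HashMap::new` used to.
fn take_or_empty(interp: Option<FxHashMap<String, u32>>) -> FxHashMap<String, u32> {
    interp.unwrap_or_else(FxHashMap::default)
}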
View File

@ -1831,10 +1831,10 @@ mod tests {
use errors;
use feature_gate::UnstableFeatures;
use parse::token;
- use std::collections::HashSet;
use std::io;
use std::path::PathBuf;
use diagnostics::plugin::ErrorMap;
+ use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::sync::Lock;
use with_globals;
fn mk_sess(cm: Lrc<SourceMap>) -> ParseSess {
@ -1845,10 +1845,10 @@ mod tests {
ParseSess {
span_diagnostic: errors::Handler::with_emitter(true, false, Box::new(emitter)),
unstable_features: UnstableFeatures::from_environment(),
- config: CrateConfig::new(),
+ config: CrateConfig::default(),
included_mod_stack: Lock::new(Vec::new()),
code_map: cm,
- missing_fragment_specifiers: Lock::new(HashSet::new()),
+ missing_fragment_specifiers: Lock::new(FxHashSet::default()),
raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
non_modrs_mods: Lock::new(vec![]),

View File

@ -24,8 +24,8 @@ use symbol::Symbol;
use tokenstream::{TokenStream, TokenTree};
use diagnostics::plugin::ErrorMap;
+ use rustc_data_structures::fx::FxHashSet;
use std::borrow::Cow;
- use std::collections::HashSet;
use std::iter;
use std::path::{Path, PathBuf};
use std::str;
@ -46,7 +46,7 @@ pub struct ParseSess {
pub span_diagnostic: Handler,
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
- pub missing_fragment_specifiers: Lock<HashSet<Span>>,
+ pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
/// Places where raw identifiers were used. This is used for feature gating
/// raw identifiers
pub raw_identifier_spans: Lock<Vec<Span>>,
@ -75,8 +75,8 @@ impl ParseSess {
ParseSess {
span_diagnostic: handler,
unstable_features: UnstableFeatures::from_environment(),
- config: HashSet::new(),
- missing_fragment_specifiers: Lock::new(HashSet::new()),
+ config: FxHashSet::default(),
+ missing_fragment_specifiers: Lock::new(FxHashSet::default()),
raw_identifier_spans: Lock::new(Vec::new()),
registered_diagnostics: Lock::new(ErrorMap::new()),
included_mod_stack: Lock::new(vec![]),

View File

@ -24,9 +24,9 @@ use syntax::tokenstream;
use syntax_pos::{MultiSpan, Span, DUMMY_SP};
use errors::Applicability;
+ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use std::borrow::Cow;
use std::collections::hash_map::Entry;
- use std::collections::{HashMap, HashSet};
#[derive(PartialEq)]
enum ArgumentType {
@ -65,7 +65,7 @@ struct Context<'a, 'b: 'a> {
/// Unique format specs seen for each argument.
arg_unique_types: Vec<Vec<ArgumentType>>,
/// Map from named arguments to their resolved indices.
- names: HashMap<String, usize>,
+ names: FxHashMap<String, usize>,
/// The latest consecutive literal strings, or empty if there weren't any.
literal: String,
@ -104,7 +104,7 @@ struct Context<'a, 'b: 'a> {
/// * `count_args`: `vec![Exact(0), Exact(5), Exact(3)]`
count_args: Vec<Position>,
/// Relative slot numbers for count arguments.
- count_positions: HashMap<usize, usize>,
+ count_positions: FxHashMap<usize, usize>,
/// Number of count slots assigned.
count_positions_count: usize,
@ -134,9 +134,9 @@ struct Context<'a, 'b: 'a> {
fn parse_args(ecx: &mut ExtCtxt,
sp: Span,
tts: &[tokenstream::TokenTree])
- -> Option<(P<ast::Expr>, Vec<P<ast::Expr>>, HashMap<String, usize>)> {
+ -> Option<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<String, usize>)> {
let mut args = Vec::<P<ast::Expr>>::new();
- let mut names = HashMap::<String, usize>::new();
+ let mut names = FxHashMap::<String, usize>::default();
let mut p = ecx.new_parser_from_tts(tts);
@ -768,7 +768,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
sp: Span,
efmt: P<ast::Expr>,
args: Vec<P<ast::Expr>>,
- names: HashMap<String, usize>,
+ names: FxHashMap<String, usize>,
append_newline: bool)
-> P<ast::Expr> {
// NOTE: this verbose way of initializing `Vec<Vec<ArgumentType>>` is because
@ -852,7 +852,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
curpiece: 0,
arg_index_map: Vec::new(),
count_args: Vec::new(),
- count_positions: HashMap::new(),
+ count_positions: FxHashMap::default(),
count_positions_count: 0,
count_args_index_offset: 0,
literal: String::new(),
@ -952,7 +952,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
// The set of foreign substitutions we've explained. This prevents spamming the user
// with `%d should be written as {}` over and over again.
- let mut explained = HashSet::new();
+ let mut explained = FxHashSet::default();
macro_rules! check_foreign {
($kind:ident) => {{

View File

@ -21,8 +21,7 @@ use edition::Edition;
use symbol::Symbol;
use serialize::{Encodable, Decodable, Encoder, Decoder};
- use std::collections::HashMap;
- use rustc_data_structures::fx::FxHashSet;
+ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use std::fmt;
/// A SyntaxContext represents a chain of macro expansions (represented by marks).
@ -190,7 +189,7 @@ impl Mark {
crate struct HygieneData {
marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>,
- markings: HashMap<(SyntaxContext, Mark, Transparency), SyntaxContext>,
+ markings: FxHashMap<(SyntaxContext, Mark, Transparency), SyntaxContext>,
default_edition: Edition,
}
@ -212,7 +211,7 @@ impl HygieneData {
opaque: SyntaxContext(0),
opaque_and_semitransparent: SyntaxContext(0),
}],
- markings: HashMap::new(),
+ markings: FxHashMap::default(),
default_edition: Edition::Edition2015,
}
}
@ -231,7 +230,7 @@ pub fn set_default_edition(edition: Edition) {
}
pub fn clear_markings() {
- HygieneData::with(|data| data.markings = HashMap::new());
+ HygieneData::with(|data| data.markings = FxHashMap::default());
}
impl SyntaxContext {