1
Fork 0

Rollup merge of #37229 - nnethercote:FxHasher, r=nikomatsakis

Replace FNV with a faster hash function.

Hash table lookups are very hot in rustc profiles and the time taken within `FnvHash` itself is a big part of that. Although FNV is a simple hash, it processes its input one byte at a time. In contrast, Firefox has a homespun hash function that is also simple but works on multiple bytes at a time. So I tried it out and the results are compelling:

```
futures-rs-test  4.326s vs  4.212s --> 1.027x faster (variance: 1.001x, 1.007x)
helloworld       0.233s vs  0.232s --> 1.004x faster (variance: 1.037x, 1.016x)
html5ever-2016-  5.397s vs  5.210s --> 1.036x faster (variance: 1.009x, 1.006x)
hyper.0.5.0      5.018s vs  4.905s --> 1.023x faster (variance: 1.007x, 1.006x)
inflate-0.1.0    4.889s vs  4.872s --> 1.004x faster (variance: 1.012x, 1.007x)
issue-32062-equ  0.347s vs  0.335s --> 1.035x faster (variance: 1.033x, 1.019x)
issue-32278-big  1.717s vs  1.622s --> 1.059x faster (variance: 1.027x, 1.028x)
jld-day15-parse  1.537s vs  1.459s --> 1.054x faster (variance: 1.005x, 1.003x)
piston-image-0. 11.863s vs 11.482s --> 1.033x faster (variance: 1.060x, 1.002x)
regex.0.1.30     2.517s vs  2.453s --> 1.026x faster (variance: 1.011x, 1.013x)
rust-encoding-0  2.080s vs  2.047s --> 1.016x faster (variance: 1.005x, 1.005x)
syntex-0.42.2   32.268s vs 31.275s --> 1.032x faster (variance: 1.014x, 1.022x)
syntex-0.42.2-i 17.629s vs 16.559s --> 1.065x faster (variance: 1.013x, 1.021x)
```

(That's a stage1 compiler doing debug builds. Results for a stage2 compiler are similar.)

The attached commit is not in a state suitable for landing because I changed the implementation of FnvHasher without changing its name (because that would have required touching many lines in the compiler). Nonetheless, it is a good place to start discussions.

Profiles show very clearly that this new hash function is a lot faster to compute than FNV. The quality of the new hash function is less clear -- it seems to do better in some cases and worse in others (judging by the number of instructions executed in `Hash{Map,Set}::get`).

CC @brson, @arthurprs
This commit is contained in:
Eduard-Mihai Burtescu 2016-11-09 20:51:15 +02:00 committed by GitHub
commit dc8ac2679a
92 changed files with 703 additions and 588 deletions

View file

@ -9,7 +9,7 @@
// except according to those terms. // except according to those terms.
use hir::def_id::DefId; use hir::def_id::DefId;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell; use std::cell::RefCell;
use std::ops::Index; use std::ops::Index;
use std::hash::Hash; use std::hash::Hash;
@ -24,7 +24,7 @@ use super::{DepNode, DepGraph};
pub struct DepTrackingMap<M: DepTrackingMapConfig> { pub struct DepTrackingMap<M: DepTrackingMapConfig> {
phantom: PhantomData<M>, phantom: PhantomData<M>,
graph: DepGraph, graph: DepGraph,
map: FnvHashMap<M::Key, M::Value>, map: FxHashMap<M::Key, M::Value>,
} }
pub trait DepTrackingMapConfig { pub trait DepTrackingMapConfig {
@ -38,7 +38,7 @@ impl<M: DepTrackingMapConfig> DepTrackingMap<M> {
DepTrackingMap { DepTrackingMap {
phantom: PhantomData, phantom: PhantomData,
graph: graph, graph: graph,
map: FnvHashMap() map: FxHashMap()
} }
} }

View file

@ -8,15 +8,15 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet}; use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use std::fmt::Debug; use std::fmt::Debug;
use std::hash::Hash; use std::hash::Hash;
use super::{DepGraphQuery, DepNode}; use super::{DepGraphQuery, DepNode};
pub struct DepGraphEdges<D: Clone + Debug + Eq + Hash> { pub struct DepGraphEdges<D: Clone + Debug + Eq + Hash> {
nodes: Vec<DepNode<D>>, nodes: Vec<DepNode<D>>,
indices: FnvHashMap<DepNode<D>, IdIndex>, indices: FxHashMap<DepNode<D>, IdIndex>,
edges: FnvHashSet<(IdIndex, IdIndex)>, edges: FxHashSet<(IdIndex, IdIndex)>,
open_nodes: Vec<OpenNode>, open_nodes: Vec<OpenNode>,
} }
@ -46,8 +46,8 @@ impl<D: Clone + Debug + Eq + Hash> DepGraphEdges<D> {
pub fn new() -> DepGraphEdges<D> { pub fn new() -> DepGraphEdges<D> {
DepGraphEdges { DepGraphEdges {
nodes: vec![], nodes: vec![],
indices: FnvHashMap(), indices: FxHashMap(),
edges: FnvHashSet(), edges: FxHashSet(),
open_nodes: Vec::new() open_nodes: Vec::new()
} }
} }

View file

@ -9,7 +9,7 @@
// except according to those terms. // except according to those terms.
use hir::def_id::DefId; use hir::def_id::DefId;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use session::config::OutputType; use session::config::OutputType;
use std::cell::{Ref, RefCell}; use std::cell::{Ref, RefCell};
use std::rc::Rc; use std::rc::Rc;
@ -34,10 +34,10 @@ struct DepGraphData {
/// things available to us. If we find that they are not dirty, we /// things available to us. If we find that they are not dirty, we
/// load the path to the file storing those work-products here into /// load the path to the file storing those work-products here into
/// this map. We can later look for and extract that data. /// this map. We can later look for and extract that data.
previous_work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>, previous_work_products: RefCell<FxHashMap<Arc<WorkProductId>, WorkProduct>>,
/// Work-products that we generate in this run. /// Work-products that we generate in this run.
work_products: RefCell<FnvHashMap<Arc<WorkProductId>, WorkProduct>>, work_products: RefCell<FxHashMap<Arc<WorkProductId>, WorkProduct>>,
} }
impl DepGraph { impl DepGraph {
@ -45,8 +45,8 @@ impl DepGraph {
DepGraph { DepGraph {
data: Rc::new(DepGraphData { data: Rc::new(DepGraphData {
thread: DepGraphThreadData::new(enabled), thread: DepGraphThreadData::new(enabled),
previous_work_products: RefCell::new(FnvHashMap()), previous_work_products: RefCell::new(FxHashMap()),
work_products: RefCell::new(FnvHashMap()), work_products: RefCell::new(FxHashMap()),
}) })
} }
} }
@ -117,7 +117,7 @@ impl DepGraph {
/// Access the map of work-products created during this run. Only /// Access the map of work-products created during this run. Only
/// used during saving of the dep-graph. /// used during saving of the dep-graph.
pub fn work_products(&self) -> Ref<FnvHashMap<Arc<WorkProductId>, WorkProduct>> { pub fn work_products(&self) -> Ref<FxHashMap<Arc<WorkProductId>, WorkProduct>> {
self.data.work_products.borrow() self.data.work_products.borrow()
} }
} }

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING}; use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING};
use std::fmt::Debug; use std::fmt::Debug;
use std::hash::Hash; use std::hash::Hash;
@ -17,7 +17,7 @@ use super::DepNode;
pub struct DepGraphQuery<D: Clone + Debug + Hash + Eq> { pub struct DepGraphQuery<D: Clone + Debug + Hash + Eq> {
pub graph: Graph<DepNode<D>, ()>, pub graph: Graph<DepNode<D>, ()>,
pub indices: FnvHashMap<DepNode<D>, NodeIndex>, pub indices: FxHashMap<DepNode<D>, NodeIndex>,
} }
impl<D: Clone + Debug + Hash + Eq> DepGraphQuery<D> { impl<D: Clone + Debug + Hash + Eq> DepGraphQuery<D> {
@ -25,7 +25,7 @@ impl<D: Clone + Debug + Hash + Eq> DepGraphQuery<D> {
edges: &[(DepNode<D>, DepNode<D>)]) edges: &[(DepNode<D>, DepNode<D>)])
-> DepGraphQuery<D> { -> DepGraphQuery<D> {
let mut graph = Graph::new(); let mut graph = Graph::new();
let mut indices = FnvHashMap(); let mut indices = FxHashMap();
for node in nodes { for node in nodes {
indices.insert(node.clone(), graph.next_node_index()); indices.insert(node.clone(), graph.next_node_index());
graph.add_node(node.clone()); graph.add_node(node.clone());

View file

@ -9,7 +9,7 @@
// except according to those terms. // except according to those terms.
use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use std::fmt::Write; use std::fmt::Write;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher; use std::collections::hash_map::DefaultHasher;
@ -22,7 +22,7 @@ use util::nodemap::NodeMap;
#[derive(Clone)] #[derive(Clone)]
pub struct Definitions { pub struct Definitions {
data: Vec<DefData>, data: Vec<DefData>,
key_map: FnvHashMap<DefKey, DefIndex>, key_map: FxHashMap<DefKey, DefIndex>,
node_map: NodeMap<DefIndex>, node_map: NodeMap<DefIndex>,
} }
@ -219,7 +219,7 @@ impl Definitions {
pub fn new() -> Definitions { pub fn new() -> Definitions {
Definitions { Definitions {
data: vec![], data: vec![],
key_map: FnvHashMap(), key_map: FxHashMap(),
node_map: NodeMap(), node_map: NodeMap(),
} }
} }

View file

@ -33,7 +33,7 @@ pub use self::PathParameters::*;
use hir::def::Def; use hir::def::Def;
use hir::def_id::DefId; use hir::def_id::DefId;
use util::nodemap::{NodeMap, FnvHashSet}; use util::nodemap::{NodeMap, FxHashSet};
use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP}; use syntax_pos::{mk_sp, Span, ExpnId, DUMMY_SP};
use syntax::codemap::{self, respan, Spanned}; use syntax::codemap::{self, respan, Spanned};
@ -1605,4 +1605,4 @@ pub type TraitMap = NodeMap<Vec<TraitCandidate>>;
// Map from the NodeId of a glob import to a list of items which are actually // Map from the NodeId of a glob import to a list of items which are actually
// imported. // imported.
pub type GlobMap = NodeMap<FnvHashSet<Name>>; pub type GlobMap = NodeMap<FxHashSet<Name>>;

View file

@ -32,7 +32,7 @@
use ty::{self, Ty, TyCtxt, TypeFoldable}; use ty::{self, Ty, TyCtxt, TypeFoldable};
use ty::fold::TypeFolder; use ty::fold::TypeFolder;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use super::InferCtxt; use super::InferCtxt;
@ -41,7 +41,7 @@ use super::unify_key::ToType;
pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
freshen_count: u32, freshen_count: u32,
freshen_map: FnvHashMap<ty::InferTy, Ty<'tcx>>, freshen_map: FxHashMap<ty::InferTy, Ty<'tcx>>,
} }
impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> {
@ -50,7 +50,7 @@ impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> {
TypeFreshener { TypeFreshener {
infcx: infcx, infcx: infcx,
freshen_count: 0, freshen_count: 0,
freshen_map: FnvHashMap(), freshen_map: FxHashMap(),
} }
} }

View file

@ -24,7 +24,7 @@ use ty::{self, TyCtxt, Binder, TypeFoldable};
use ty::error::TypeError; use ty::error::TypeError;
use ty::relate::{Relate, RelateResult, TypeRelation}; use ty::relate::{Relate, RelateResult, TypeRelation};
use syntax_pos::Span; use syntax_pos::Span;
use util::nodemap::{FnvHashMap, FnvHashSet}; use util::nodemap::{FxHashMap, FxHashSet};
pub struct HrMatchResult<U> { pub struct HrMatchResult<U> {
pub value: U, pub value: U,
@ -135,7 +135,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// Map each skolemized region to a vector of other regions that it // Map each skolemized region to a vector of other regions that it
// must be equated with. (Note that this vector may include other // must be equated with. (Note that this vector may include other
// skolemized regions from `skol_map`.) // skolemized regions from `skol_map`.)
let skol_resolution_map: FnvHashMap<_, _> = let skol_resolution_map: FxHashMap<_, _> =
skol_map skol_map
.iter() .iter()
.map(|(&br, &skol)| { .map(|(&br, &skol)| {
@ -158,7 +158,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
// `skol_map`. There should always be a representative if things // `skol_map`. There should always be a representative if things
// are properly well-formed. // are properly well-formed.
let mut unconstrained_regions = vec![]; let mut unconstrained_regions = vec![];
let skol_representatives: FnvHashMap<_, _> = let skol_representatives: FxHashMap<_, _> =
skol_resolution_map skol_resolution_map
.iter() .iter()
.map(|(&skol, &(br, ref regions))| { .map(|(&skol, &(br, ref regions))| {
@ -268,7 +268,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot, snapshot: &CombinedSnapshot,
debruijn: ty::DebruijnIndex, debruijn: ty::DebruijnIndex,
new_vars: &[ty::RegionVid], new_vars: &[ty::RegionVid],
a_map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>, a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
r0: &'tcx ty::Region) r0: &'tcx ty::Region)
-> &'tcx ty::Region { -> &'tcx ty::Region {
// Regions that pre-dated the LUB computation stay as they are. // Regions that pre-dated the LUB computation stay as they are.
@ -364,8 +364,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot, snapshot: &CombinedSnapshot,
debruijn: ty::DebruijnIndex, debruijn: ty::DebruijnIndex,
new_vars: &[ty::RegionVid], new_vars: &[ty::RegionVid],
a_map: &FnvHashMap<ty::BoundRegion, a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
&'tcx ty::Region>,
a_vars: &[ty::RegionVid], a_vars: &[ty::RegionVid],
b_vars: &[ty::RegionVid], b_vars: &[ty::RegionVid],
r0: &'tcx ty::Region) r0: &'tcx ty::Region)
@ -434,7 +433,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
fn rev_lookup<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, fn rev_lookup<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
span: Span, span: Span,
a_map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>, a_map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>,
r: &'tcx ty::Region) -> &'tcx ty::Region r: &'tcx ty::Region) -> &'tcx ty::Region
{ {
for (a_br, a_r) in a_map { for (a_br, a_r) in a_map {
@ -457,7 +456,7 @@ impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> {
} }
fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>, fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>,
map: &FnvHashMap<ty::BoundRegion, &'tcx ty::Region>) map: &FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
-> Vec<ty::RegionVid> { -> Vec<ty::RegionVid> {
map.iter() map.iter()
.map(|(_, &r)| match *r { .map(|(_, &r)| match *r {
@ -504,7 +503,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot, snapshot: &CombinedSnapshot,
r: &'tcx ty::Region, r: &'tcx ty::Region,
directions: TaintDirections) directions: TaintDirections)
-> FnvHashSet<&'tcx ty::Region> { -> FxHashSet<&'tcx ty::Region> {
self.region_vars.tainted(&snapshot.region_vars_snapshot, r, directions) self.region_vars.tainted(&snapshot.region_vars_snapshot, r, directions)
} }
@ -568,7 +567,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
let escaping_types = let escaping_types =
self.type_variables.borrow_mut().types_escaping_snapshot(&snapshot.type_snapshot); self.type_variables.borrow_mut().types_escaping_snapshot(&snapshot.type_snapshot);
let mut escaping_region_vars = FnvHashSet(); let mut escaping_region_vars = FxHashSet();
for ty in &escaping_types { for ty in &escaping_types {
self.tcx.collect_regions(ty, &mut escaping_region_vars); self.tcx.collect_regions(ty, &mut escaping_region_vars);
} }
@ -764,7 +763,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
// region back to the `ty::BoundRegion` that it originally // region back to the `ty::BoundRegion` that it originally
// represented. Because `leak_check` passed, we know that // represented. Because `leak_check` passed, we know that
// these taint sets are mutually disjoint. // these taint sets are mutually disjoint.
let inv_skol_map: FnvHashMap<&'tcx ty::Region, ty::BoundRegion> = let inv_skol_map: FxHashMap<&'tcx ty::Region, ty::BoundRegion> =
skol_map skol_map
.iter() .iter()
.flat_map(|(&skol_br, &skol)| { .flat_map(|(&skol_br, &skol)| {
@ -837,7 +836,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
snapshot: &CombinedSnapshot) snapshot: &CombinedSnapshot)
{ {
debug!("pop_skolemized({:?})", skol_map); debug!("pop_skolemized({:?})", skol_map);
let skol_regions: FnvHashSet<_> = skol_map.values().cloned().collect(); let skol_regions: FxHashSet<_> = skol_map.values().cloned().collect();
self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot); self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot);
if !skol_map.is_empty() { if !skol_map.is_empty() {
self.projection_cache.borrow_mut().rollback_skolemized( self.projection_cache.borrow_mut().rollback_skolemized(

View file

@ -39,7 +39,7 @@ use std::fmt;
use syntax::ast; use syntax::ast;
use errors::DiagnosticBuilder; use errors::DiagnosticBuilder;
use syntax_pos::{self, Span, DUMMY_SP}; use syntax_pos::{self, Span, DUMMY_SP};
use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap}; use util::nodemap::{FxHashMap, FxHashSet, NodeMap};
use self::combine::CombineFields; use self::combine::CombineFields;
use self::higher_ranked::HrMatchResult; use self::higher_ranked::HrMatchResult;
@ -134,7 +134,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// the set of predicates on which errors have been reported, to // the set of predicates on which errors have been reported, to
// avoid reporting the same error twice. // avoid reporting the same error twice.
pub reported_trait_errors: RefCell<FnvHashSet<traits::TraitErrorKey<'tcx>>>, pub reported_trait_errors: RefCell<FxHashSet<traits::TraitErrorKey<'tcx>>>,
// Sadly, the behavior of projection varies a bit depending on the // Sadly, the behavior of projection varies a bit depending on the
// stage of compilation. The specifics are given in the // stage of compilation. The specifics are given in the
@ -170,7 +170,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
/// A map returned by `skolemize_late_bound_regions()` indicating the skolemized /// A map returned by `skolemize_late_bound_regions()` indicating the skolemized
/// region that each late-bound region was replaced with. /// region that each late-bound region was replaced with.
pub type SkolemizationMap<'tcx> = FnvHashMap<ty::BoundRegion, &'tcx ty::Region>; pub type SkolemizationMap<'tcx> = FxHashMap<ty::BoundRegion, &'tcx ty::Region>;
/// Why did we require that the two types be related? /// Why did we require that the two types be related?
/// ///
@ -492,7 +492,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'gcx> {
selection_cache: traits::SelectionCache::new(), selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(), evaluation_cache: traits::EvaluationCache::new(),
projection_cache: RefCell::new(traits::ProjectionCache::new()), projection_cache: RefCell::new(traits::ProjectionCache::new()),
reported_trait_errors: RefCell::new(FnvHashSet()), reported_trait_errors: RefCell::new(FxHashSet()),
projection_mode: Reveal::NotSpecializable, projection_mode: Reveal::NotSpecializable,
tainted_by_errors_flag: Cell::new(false), tainted_by_errors_flag: Cell::new(false),
err_count_on_creation: self.sess.err_count(), err_count_on_creation: self.sess.err_count(),
@ -531,7 +531,7 @@ impl<'a, 'gcx, 'tcx> InferCtxtBuilder<'a, 'gcx, 'tcx> {
parameter_environment: param_env, parameter_environment: param_env,
selection_cache: traits::SelectionCache::new(), selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(), evaluation_cache: traits::EvaluationCache::new(),
reported_trait_errors: RefCell::new(FnvHashSet()), reported_trait_errors: RefCell::new(FxHashSet()),
projection_mode: projection_mode, projection_mode: projection_mode,
tainted_by_errors_flag: Cell::new(false), tainted_by_errors_flag: Cell::new(false),
err_count_on_creation: tcx.sess.err_count(), err_count_on_creation: tcx.sess.err_count(),
@ -1530,7 +1530,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
span: Span, span: Span,
lbrct: LateBoundRegionConversionTime, lbrct: LateBoundRegionConversionTime,
value: &ty::Binder<T>) value: &ty::Binder<T>)
-> (T, FnvHashMap<ty::BoundRegion, &'tcx ty::Region>) -> (T, FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
where T : TypeFoldable<'tcx> where T : TypeFoldable<'tcx>
{ {
self.tcx.replace_late_bound_regions( self.tcx.replace_late_bound_regions(

View file

@ -23,7 +23,7 @@ use middle::region::CodeExtent;
use super::Constraint; use super::Constraint;
use infer::SubregionOrigin; use infer::SubregionOrigin;
use infer::region_inference::RegionVarBindings; use infer::region_inference::RegionVarBindings;
use util::nodemap::{FnvHashMap, FnvHashSet}; use util::nodemap::{FxHashMap, FxHashSet};
use std::borrow::Cow; use std::borrow::Cow;
use std::collections::hash_map::Entry::Vacant; use std::collections::hash_map::Entry::Vacant;
@ -122,8 +122,8 @@ pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>(
struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>,
graph_name: String, graph_name: String,
map: &'a FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>, map: &'a FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>,
node_ids: FnvHashMap<Node, usize>, node_ids: FxHashMap<Node, usize>,
} }
#[derive(Clone, Hash, PartialEq, Eq, Debug, Copy)] #[derive(Clone, Hash, PartialEq, Eq, Debug, Copy)]
@ -145,7 +145,7 @@ impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> {
map: &'a ConstraintMap<'tcx>) map: &'a ConstraintMap<'tcx>)
-> ConstraintGraph<'a, 'gcx, 'tcx> { -> ConstraintGraph<'a, 'gcx, 'tcx> {
let mut i = 0; let mut i = 0;
let mut node_ids = FnvHashMap(); let mut node_ids = FxHashMap();
{ {
let mut add_node = |node| { let mut add_node = |node| {
if let Vacant(e) = node_ids.entry(node) { if let Vacant(e) = node_ids.entry(node) {
@ -235,7 +235,7 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
type Node = Node; type Node = Node;
type Edge = Edge<'tcx>; type Edge = Edge<'tcx>;
fn nodes(&self) -> dot::Nodes<Node> { fn nodes(&self) -> dot::Nodes<Node> {
let mut set = FnvHashSet(); let mut set = FxHashSet();
for node in self.node_ids.keys() { for node in self.node_ids.keys() {
set.insert(*node); set.insert(*node);
} }
@ -261,7 +261,7 @@ impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> {
} }
} }
pub type ConstraintMap<'tcx> = FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>; pub type ConstraintMap<'tcx> = FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>;
fn dump_region_constraints_to<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, fn dump_region_constraints_to<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
map: &ConstraintMap<'tcx>, map: &ConstraintMap<'tcx>,

View file

@ -19,7 +19,7 @@ pub use self::VarValue::*;
use super::{RegionVariableOrigin, SubregionOrigin, MiscVariable}; use super::{RegionVariableOrigin, SubregionOrigin, MiscVariable};
use super::unify_key; use super::unify_key;
use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet}; use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING}; use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING};
use rustc_data_structures::unify::{self, UnificationTable}; use rustc_data_structures::unify::{self, UnificationTable};
use middle::free_region::FreeRegionMap; use middle::free_region::FreeRegionMap;
@ -213,7 +213,7 @@ impl SameRegions {
} }
} }
pub type CombineMap<'tcx> = FnvHashMap<TwoRegions<'tcx>, RegionVid>; pub type CombineMap<'tcx> = FxHashMap<TwoRegions<'tcx>, RegionVid>;
pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>,
@ -222,7 +222,7 @@ pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// Constraints of the form `A <= B` introduced by the region // Constraints of the form `A <= B` introduced by the region
// checker. Here at least one of `A` and `B` must be a region // checker. Here at least one of `A` and `B` must be a region
// variable. // variable.
constraints: RefCell<FnvHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>>, constraints: RefCell<FxHashMap<Constraint<'tcx>, SubregionOrigin<'tcx>>>,
// A "verify" is something that we need to verify after inference is // A "verify" is something that we need to verify after inference is
// done, but which does not directly affect inference in any way. // done, but which does not directly affect inference in any way.
@ -248,7 +248,7 @@ pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
// record the fact that `'a <= 'b` is implied by the fn signature, // record the fact that `'a <= 'b` is implied by the fn signature,
// and then ignore the constraint when solving equations. This is // and then ignore the constraint when solving equations. This is
// a bit of a hack but seems to work. // a bit of a hack but seems to work.
givens: RefCell<FnvHashSet<(ty::FreeRegion, ty::RegionVid)>>, givens: RefCell<FxHashSet<(ty::FreeRegion, ty::RegionVid)>>,
lubs: RefCell<CombineMap<'tcx>>, lubs: RefCell<CombineMap<'tcx>>,
glbs: RefCell<CombineMap<'tcx>>, glbs: RefCell<CombineMap<'tcx>>,
@ -305,14 +305,14 @@ impl TaintDirections {
struct TaintSet<'tcx> { struct TaintSet<'tcx> {
directions: TaintDirections, directions: TaintDirections,
regions: FnvHashSet<&'tcx ty::Region> regions: FxHashSet<&'tcx ty::Region>
} }
impl<'a, 'gcx, 'tcx> TaintSet<'tcx> { impl<'a, 'gcx, 'tcx> TaintSet<'tcx> {
fn new(directions: TaintDirections, fn new(directions: TaintDirections,
initial_region: &'tcx ty::Region) initial_region: &'tcx ty::Region)
-> Self { -> Self {
let mut regions = FnvHashSet(); let mut regions = FxHashSet();
regions.insert(initial_region); regions.insert(initial_region);
TaintSet { directions: directions, regions: regions } TaintSet { directions: directions, regions: regions }
} }
@ -362,7 +362,7 @@ impl<'a, 'gcx, 'tcx> TaintSet<'tcx> {
} }
} }
fn into_set(self) -> FnvHashSet<&'tcx ty::Region> { fn into_set(self) -> FxHashSet<&'tcx ty::Region> {
self.regions self.regions
} }
@ -393,11 +393,11 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
tcx: tcx, tcx: tcx,
var_origins: RefCell::new(Vec::new()), var_origins: RefCell::new(Vec::new()),
values: RefCell::new(None), values: RefCell::new(None),
constraints: RefCell::new(FnvHashMap()), constraints: RefCell::new(FxHashMap()),
verifys: RefCell::new(Vec::new()), verifys: RefCell::new(Vec::new()),
givens: RefCell::new(FnvHashSet()), givens: RefCell::new(FxHashSet()),
lubs: RefCell::new(FnvHashMap()), lubs: RefCell::new(FxHashMap()),
glbs: RefCell::new(FnvHashMap()), glbs: RefCell::new(FxHashMap()),
skolemization_count: Cell::new(0), skolemization_count: Cell::new(0),
bound_count: Cell::new(0), bound_count: Cell::new(0),
undo_log: RefCell::new(Vec::new()), undo_log: RefCell::new(Vec::new()),
@ -547,7 +547,7 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
/// completes to remove all trace of the skolemized regions /// completes to remove all trace of the skolemized regions
/// created in that time. /// created in that time.
pub fn pop_skolemized(&self, pub fn pop_skolemized(&self,
skols: &FnvHashSet<&'tcx ty::Region>, skols: &FxHashSet<&'tcx ty::Region>,
snapshot: &RegionSnapshot) { snapshot: &RegionSnapshot) {
debug!("pop_skolemized_regions(skols={:?})", skols); debug!("pop_skolemized_regions(skols={:?})", skols);
@ -601,7 +601,7 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
self.skolemization_count.set(snapshot.skolemization_count); self.skolemization_count.set(snapshot.skolemization_count);
return; return;
fn kill_constraint<'tcx>(skols: &FnvHashSet<&'tcx ty::Region>, fn kill_constraint<'tcx>(skols: &FxHashSet<&'tcx ty::Region>,
undo_entry: &UndoLogEntry<'tcx>) undo_entry: &UndoLogEntry<'tcx>)
-> bool { -> bool {
match undo_entry { match undo_entry {
@ -905,7 +905,7 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
mark: &RegionSnapshot, mark: &RegionSnapshot,
r0: &'tcx Region, r0: &'tcx Region,
directions: TaintDirections) directions: TaintDirections)
-> FnvHashSet<&'tcx ty::Region> { -> FxHashSet<&'tcx ty::Region> {
debug!("tainted(mark={:?}, r0={:?}, directions={:?})", debug!("tainted(mark={:?}, r0={:?}, directions={:?})",
mark, r0, directions); mark, r0, directions);
@ -1414,13 +1414,13 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
dup_vec: &mut [u32]) dup_vec: &mut [u32])
-> (Vec<RegionAndOrigin<'tcx>>, bool) { -> (Vec<RegionAndOrigin<'tcx>>, bool) {
struct WalkState<'tcx> { struct WalkState<'tcx> {
set: FnvHashSet<RegionVid>, set: FxHashSet<RegionVid>,
stack: Vec<RegionVid>, stack: Vec<RegionVid>,
result: Vec<RegionAndOrigin<'tcx>>, result: Vec<RegionAndOrigin<'tcx>>,
dup_found: bool, dup_found: bool,
} }
let mut state = WalkState { let mut state = WalkState {
set: FnvHashSet(), set: FxHashSet(),
stack: vec![orig_node_idx], stack: vec![orig_node_idx],
result: Vec::new(), result: Vec::new(),
dup_found: false, dup_found: false,

View file

@ -33,7 +33,7 @@ use lint::{Level, LevelSource, Lint, LintId, LintPass, LintSource};
use lint::{EarlyLintPassObject, LateLintPassObject}; use lint::{EarlyLintPassObject, LateLintPassObject};
use lint::{Default, CommandLine, Node, Allow, Warn, Deny, Forbid}; use lint::{Default, CommandLine, Node, Allow, Warn, Deny, Forbid};
use lint::builtin; use lint::builtin;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use std::cmp; use std::cmp;
use std::default::Default as StdDefault; use std::default::Default as StdDefault;
@ -64,18 +64,18 @@ pub struct LintStore {
late_passes: Option<Vec<LateLintPassObject>>, late_passes: Option<Vec<LateLintPassObject>>,
/// Lints indexed by name. /// Lints indexed by name.
by_name: FnvHashMap<String, TargetLint>, by_name: FxHashMap<String, TargetLint>,
/// Current levels of each lint, and where they were set. /// Current levels of each lint, and where they were set.
levels: FnvHashMap<LintId, LevelSource>, levels: FxHashMap<LintId, LevelSource>,
/// Map of registered lint groups to what lints they expand to. The bool /// Map of registered lint groups to what lints they expand to. The bool
/// is true if the lint group was added by a plugin. /// is true if the lint group was added by a plugin.
lint_groups: FnvHashMap<&'static str, (Vec<LintId>, bool)>, lint_groups: FxHashMap<&'static str, (Vec<LintId>, bool)>,
/// Extra info for future incompatibility lints, describing the /// Extra info for future incompatibility lints, describing the
/// issue or RFC that caused the incompatibility. /// issue or RFC that caused the incompatibility.
future_incompatible: FnvHashMap<LintId, FutureIncompatibleInfo>, future_incompatible: FxHashMap<LintId, FutureIncompatibleInfo>,
/// Maximum level a lint can be /// Maximum level a lint can be
lint_cap: Option<Level>, lint_cap: Option<Level>,
@ -171,10 +171,10 @@ impl LintStore {
lints: vec![], lints: vec![],
early_passes: Some(vec![]), early_passes: Some(vec![]),
late_passes: Some(vec![]), late_passes: Some(vec![]),
by_name: FnvHashMap(), by_name: FxHashMap(),
levels: FnvHashMap(), levels: FxHashMap(),
future_incompatible: FnvHashMap(), future_incompatible: FxHashMap(),
lint_groups: FnvHashMap(), lint_groups: FxHashMap(),
lint_cap: None, lint_cap: None,
} }
} }
@ -304,8 +304,8 @@ impl LintStore {
Err(FindLintError::Removed) => { } Err(FindLintError::Removed) => { }
Err(_) => { Err(_) => {
match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone())) match self.lint_groups.iter().map(|(&x, pair)| (x, pair.0.clone()))
.collect::<FnvHashMap<&'static str, .collect::<FxHashMap<&'static str,
Vec<LintId>>>() Vec<LintId>>>()
.get(&lint_name[..]) { .get(&lint_name[..]) {
Some(v) => { Some(v) => {
v.iter() v.iter()

View file

@ -22,7 +22,7 @@ use ty::{self, TyCtxt};
use hir::def::Def; use hir::def::Def;
use hir::def_id::{DefId}; use hir::def_id::{DefId};
use lint; use lint;
use util::nodemap::FnvHashSet; use util::nodemap::FxHashSet;
use syntax::{ast, codemap}; use syntax::{ast, codemap};
use syntax::attr; use syntax::attr;
@ -48,7 +48,7 @@ fn should_explore<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
struct MarkSymbolVisitor<'a, 'tcx: 'a> { struct MarkSymbolVisitor<'a, 'tcx: 'a> {
worklist: Vec<ast::NodeId>, worklist: Vec<ast::NodeId>,
tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>,
live_symbols: Box<FnvHashSet<ast::NodeId>>, live_symbols: Box<FxHashSet<ast::NodeId>>,
struct_has_extern_repr: bool, struct_has_extern_repr: bool,
ignore_non_const_paths: bool, ignore_non_const_paths: bool,
inherited_pub_visibility: bool, inherited_pub_visibility: bool,
@ -61,7 +61,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
MarkSymbolVisitor { MarkSymbolVisitor {
worklist: worklist, worklist: worklist,
tcx: tcx, tcx: tcx,
live_symbols: box FnvHashSet(), live_symbols: box FxHashSet(),
struct_has_extern_repr: false, struct_has_extern_repr: false,
ignore_non_const_paths: false, ignore_non_const_paths: false,
inherited_pub_visibility: false, inherited_pub_visibility: false,
@ -163,7 +163,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
} }
fn mark_live_symbols(&mut self) { fn mark_live_symbols(&mut self) {
let mut scanned = FnvHashSet(); let mut scanned = FxHashSet();
while !self.worklist.is_empty() { while !self.worklist.is_empty() {
let id = self.worklist.pop().unwrap(); let id = self.worklist.pop().unwrap();
if scanned.contains(&id) { if scanned.contains(&id) {
@ -396,7 +396,7 @@ fn create_and_seed_worklist<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
fn find_live<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn find_live<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
access_levels: &privacy::AccessLevels, access_levels: &privacy::AccessLevels,
krate: &hir::Crate) krate: &hir::Crate)
-> Box<FnvHashSet<ast::NodeId>> { -> Box<FxHashSet<ast::NodeId>> {
let worklist = create_and_seed_worklist(tcx, access_levels, krate); let worklist = create_and_seed_worklist(tcx, access_levels, krate);
let mut symbol_visitor = MarkSymbolVisitor::new(tcx, worklist); let mut symbol_visitor = MarkSymbolVisitor::new(tcx, worklist);
symbol_visitor.mark_live_symbols(); symbol_visitor.mark_live_symbols();
@ -414,7 +414,7 @@ fn get_struct_ctor_id(item: &hir::Item) -> Option<ast::NodeId> {
struct DeadVisitor<'a, 'tcx: 'a> { struct DeadVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>,
live_symbols: Box<FnvHashSet<ast::NodeId>>, live_symbols: Box<FxHashSet<ast::NodeId>>,
} }
impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { impl<'a, 'tcx> DeadVisitor<'a, 'tcx> {

View file

@ -66,7 +66,7 @@ use hir::def_id::CrateNum;
use session; use session;
use session::config; use session::config;
use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic}; use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic};
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use rustc_back::PanicStrategy; use rustc_back::PanicStrategy;
/// A list of dependencies for a certain crate type. /// A list of dependencies for a certain crate type.
@ -80,7 +80,7 @@ pub type DependencyList = Vec<Linkage>;
/// A mapping of all required dependencies for a particular flavor of output. /// A mapping of all required dependencies for a particular flavor of output.
/// ///
/// This is local to the tcx, and is generally relevant to one session. /// This is local to the tcx, and is generally relevant to one session.
pub type Dependencies = FnvHashMap<config::CrateType, DependencyList>; pub type Dependencies = FxHashMap<config::CrateType, DependencyList>;
#[derive(Copy, Clone, PartialEq, Debug)] #[derive(Copy, Clone, PartialEq, Debug)]
pub enum Linkage { pub enum Linkage {
@ -149,7 +149,7 @@ fn calculate_type(sess: &session::Session,
config::CrateTypeProcMacro => {}, config::CrateTypeProcMacro => {},
} }
let mut formats = FnvHashMap(); let mut formats = FxHashMap();
// Sweep all crates for found dylibs. Add all dylibs, as well as their // Sweep all crates for found dylibs. Add all dylibs, as well as their
// dependencies, ensuring there are no conflicts. The only valid case for a // dependencies, ensuring there are no conflicts. The only valid case for a
@ -240,7 +240,7 @@ fn calculate_type(sess: &session::Session,
fn add_library(sess: &session::Session, fn add_library(sess: &session::Session,
cnum: CrateNum, cnum: CrateNum,
link: LinkagePreference, link: LinkagePreference,
m: &mut FnvHashMap<CrateNum, LinkagePreference>) { m: &mut FxHashMap<CrateNum, LinkagePreference>) {
match m.get(&cnum) { match m.get(&cnum) {
Some(&link2) => { Some(&link2) => {
// If the linkages differ, then we'd have two copies of the library // If the linkages differ, then we'd have two copies of the library

View file

@ -27,7 +27,7 @@ use session::Session;
use hir::def_id::DefId; use hir::def_id::DefId;
use ty; use ty;
use middle::weak_lang_items; use middle::weak_lang_items;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use syntax::ast; use syntax::ast;
use syntax::parse::token::InternedString; use syntax::parse::token::InternedString;
@ -146,7 +146,7 @@ struct LanguageItemCollector<'a, 'tcx: 'a> {
session: &'a Session, session: &'a Session,
item_refs: FnvHashMap<&'static str, usize>, item_refs: FxHashMap<&'static str, usize>,
} }
impl<'a, 'v, 'tcx> Visitor<'v> for LanguageItemCollector<'a, 'tcx> { impl<'a, 'v, 'tcx> Visitor<'v> for LanguageItemCollector<'a, 'tcx> {
@ -169,7 +169,7 @@ impl<'a, 'v, 'tcx> Visitor<'v> for LanguageItemCollector<'a, 'tcx> {
impl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> { impl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> {
pub fn new(session: &'a Session, ast_map: &'a hir_map::Map<'tcx>) pub fn new(session: &'a Session, ast_map: &'a hir_map::Map<'tcx>)
-> LanguageItemCollector<'a, 'tcx> { -> LanguageItemCollector<'a, 'tcx> {
let mut item_refs = FnvHashMap(); let mut item_refs = FxHashMap();
$( item_refs.insert($name, $variant as usize); )* $( item_refs.insert($name, $variant as usize); )*

View file

@ -12,7 +12,7 @@
//! outside their scopes. This pass will also generate a set of exported items //! outside their scopes. This pass will also generate a set of exported items
//! which are available for use externally when compiled as a library. //! which are available for use externally when compiled as a library.
use util::nodemap::{DefIdSet, FnvHashMap}; use util::nodemap::{DefIdSet, FxHashMap};
use std::hash::Hash; use std::hash::Hash;
use std::fmt; use std::fmt;
@ -35,7 +35,7 @@ pub enum AccessLevel {
// Accessibility levels for reachable HIR nodes // Accessibility levels for reachable HIR nodes
#[derive(Clone)] #[derive(Clone)]
pub struct AccessLevels<Id = NodeId> { pub struct AccessLevels<Id = NodeId> {
pub map: FnvHashMap<Id, AccessLevel> pub map: FxHashMap<Id, AccessLevel>
} }
impl<Id: Hash + Eq> AccessLevels<Id> { impl<Id: Hash + Eq> AccessLevels<Id> {

View file

@ -22,7 +22,7 @@ use hir::def_id::DefId;
use ty::{self, TyCtxt}; use ty::{self, TyCtxt};
use middle::privacy; use middle::privacy;
use session::config; use session::config;
use util::nodemap::{NodeSet, FnvHashSet}; use util::nodemap::{NodeSet, FxHashSet};
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::ast; use syntax::ast;
@ -204,7 +204,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> {
// Step 2: Mark all symbols that the symbols on the worklist touch. // Step 2: Mark all symbols that the symbols on the worklist touch.
fn propagate(&mut self) { fn propagate(&mut self) {
let mut scanned = FnvHashSet(); let mut scanned = FxHashSet();
loop { loop {
let search_item = match self.worklist.pop() { let search_item = match self.worklist.pop() {
Some(item) => item, Some(item) => item,

View file

@ -19,7 +19,7 @@
use dep_graph::DepNode; use dep_graph::DepNode;
use hir::map as ast_map; use hir::map as ast_map;
use session::Session; use session::Session;
use util::nodemap::{FnvHashMap, NodeMap, NodeSet}; use util::nodemap::{FxHashMap, NodeMap, NodeSet};
use ty; use ty;
use std::cell::RefCell; use std::cell::RefCell;
@ -251,7 +251,7 @@ impl CodeExtent {
/// The region maps encode information about region relationships. /// The region maps encode information about region relationships.
pub struct RegionMaps { pub struct RegionMaps {
code_extents: RefCell<Vec<CodeExtentData>>, code_extents: RefCell<Vec<CodeExtentData>>,
code_extent_interner: RefCell<FnvHashMap<CodeExtentData, CodeExtent>>, code_extent_interner: RefCell<FxHashMap<CodeExtentData, CodeExtent>>,
/// `scope_map` maps from a scope id to the enclosing scope id; /// `scope_map` maps from a scope id to the enclosing scope id;
/// this is usually corresponding to the lexical nesting, though /// this is usually corresponding to the lexical nesting, though
/// in the case of closures the parent scope is the innermost /// in the case of closures the parent scope is the innermost
@ -1217,7 +1217,7 @@ pub fn resolve_crate(sess: &Session, map: &ast_map::Map) -> RegionMaps {
let maps = RegionMaps { let maps = RegionMaps {
code_extents: RefCell::new(vec![]), code_extents: RefCell::new(vec![]),
code_extent_interner: RefCell::new(FnvHashMap()), code_extent_interner: RefCell::new(FxHashMap()),
scope_map: RefCell::new(vec![]), scope_map: RefCell::new(vec![]),
var_map: RefCell::new(NodeMap()), var_map: RefCell::new(NodeMap()),
rvalue_scopes: RefCell::new(NodeMap()), rvalue_scopes: RefCell::new(NodeMap()),

View file

@ -31,7 +31,7 @@ use syntax::parse::token::keywords;
use syntax_pos::Span; use syntax_pos::Span;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
use rustc_data_structures::fnv::FnvHashSet; use rustc_data_structures::fx::FxHashSet;
use hir; use hir;
use hir::print::lifetime_to_string; use hir::print::lifetime_to_string;
use hir::intravisit::{self, Visitor, FnKind}; use hir::intravisit::{self, Visitor, FnKind};
@ -847,13 +847,13 @@ fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
generics: &hir::Generics) { generics: &hir::Generics) {
debug!("insert_late_bound_lifetimes(decl={:?}, generics={:?})", decl, generics); debug!("insert_late_bound_lifetimes(decl={:?}, generics={:?})", decl, generics);
let mut constrained_by_input = ConstrainedCollector { regions: FnvHashSet() }; let mut constrained_by_input = ConstrainedCollector { regions: FxHashSet() };
for arg in &decl.inputs { for arg in &decl.inputs {
constrained_by_input.visit_ty(&arg.ty); constrained_by_input.visit_ty(&arg.ty);
} }
let mut appears_in_output = AllCollector { let mut appears_in_output = AllCollector {
regions: FnvHashSet(), regions: FxHashSet(),
impl_trait: false impl_trait: false
}; };
intravisit::walk_fn_ret_ty(&mut appears_in_output, &decl.output); intravisit::walk_fn_ret_ty(&mut appears_in_output, &decl.output);
@ -866,7 +866,7 @@ fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
// Subtle point: because we disallow nested bindings, we can just // Subtle point: because we disallow nested bindings, we can just
// ignore binders here and scrape up all names we see. // ignore binders here and scrape up all names we see.
let mut appears_in_where_clause = AllCollector { let mut appears_in_where_clause = AllCollector {
regions: FnvHashSet(), regions: FxHashSet(),
impl_trait: false impl_trait: false
}; };
for ty_param in generics.ty_params.iter() { for ty_param in generics.ty_params.iter() {
@ -926,7 +926,7 @@ fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
return; return;
struct ConstrainedCollector { struct ConstrainedCollector {
regions: FnvHashSet<ast::Name>, regions: FxHashSet<ast::Name>,
} }
impl<'v> Visitor<'v> for ConstrainedCollector { impl<'v> Visitor<'v> for ConstrainedCollector {
@ -961,7 +961,7 @@ fn insert_late_bound_lifetimes(map: &mut NamedRegionMap,
} }
struct AllCollector { struct AllCollector {
regions: FnvHashSet<ast::Name>, regions: FxHashSet<ast::Name>,
impl_trait: bool impl_trait: bool
} }

View file

@ -27,7 +27,7 @@ use syntax::ast;
use syntax::ast::{NodeId, Attribute}; use syntax::ast::{NodeId, Attribute};
use syntax::feature_gate::{GateIssue, emit_feature_err, find_lang_feature_accepted_version}; use syntax::feature_gate::{GateIssue, emit_feature_err, find_lang_feature_accepted_version};
use syntax::attr::{self, Stability, Deprecation}; use syntax::attr::{self, Stability, Deprecation};
use util::nodemap::{DefIdMap, FnvHashSet, FnvHashMap}; use util::nodemap::{DefIdMap, FxHashSet, FxHashMap};
use hir; use hir;
use hir::{Item, Generics, StructField, Variant, PatKind}; use hir::{Item, Generics, StructField, Variant, PatKind};
@ -102,7 +102,7 @@ pub struct Index<'tcx> {
depr_map: DefIdMap<Option<DeprecationEntry>>, depr_map: DefIdMap<Option<DeprecationEntry>>,
/// Maps for each crate whether it is part of the staged API. /// Maps for each crate whether it is part of the staged API.
staged_api: FnvHashMap<CrateNum, bool> staged_api: FxHashMap<CrateNum, bool>
} }
// A private tree-walker for producing an Index. // A private tree-walker for producing an Index.
@ -343,7 +343,7 @@ impl<'a, 'tcx> Index<'tcx> {
} }
} }
let mut staged_api = FnvHashMap(); let mut staged_api = FxHashMap();
staged_api.insert(LOCAL_CRATE, is_staged_api); staged_api.insert(LOCAL_CRATE, is_staged_api);
Index { Index {
staged_api: staged_api, staged_api: staged_api,
@ -357,7 +357,7 @@ impl<'a, 'tcx> Index<'tcx> {
/// features and possibly prints errors. Returns a list of all /// features and possibly prints errors. Returns a list of all
/// features used. /// features used.
pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
-> FnvHashMap<InternedString, attr::StabilityLevel> { -> FxHashMap<InternedString, attr::StabilityLevel> {
let _task = tcx.dep_graph.in_task(DepNode::StabilityCheck); let _task = tcx.dep_graph.in_task(DepNode::StabilityCheck);
let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features; let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features;
@ -367,7 +367,7 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
let mut checker = Checker { let mut checker = Checker {
tcx: tcx, tcx: tcx,
active_features: active_features, active_features: active_features,
used_features: FnvHashMap(), used_features: FxHashMap(),
in_skip_block: 0, in_skip_block: 0,
}; };
intravisit::walk_crate(&mut checker, tcx.map.krate()); intravisit::walk_crate(&mut checker, tcx.map.krate());
@ -377,8 +377,8 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
struct Checker<'a, 'tcx: 'a> { struct Checker<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>,
active_features: FnvHashSet<InternedString>, active_features: FxHashSet<InternedString>,
used_features: FnvHashMap<InternedString, attr::StabilityLevel>, used_features: FxHashMap<InternedString, attr::StabilityLevel>,
// Within a block where feature gate checking can be skipped. // Within a block where feature gate checking can be skipped.
in_skip_block: u32, in_skip_block: u32,
} }
@ -746,10 +746,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
/// were expected to be library features), and the list of features used from /// were expected to be library features), and the list of features used from
/// libraries, identify activated features that don't exist and error about them. /// libraries, identify activated features that don't exist and error about them.
pub fn check_unused_or_stable_features(sess: &Session, pub fn check_unused_or_stable_features(sess: &Session,
lib_features_used: &FnvHashMap<InternedString, lib_features_used: &FxHashMap<InternedString,
attr::StabilityLevel>) { attr::StabilityLevel>) {
let ref declared_lib_features = sess.features.borrow().declared_lib_features; let ref declared_lib_features = sess.features.borrow().declared_lib_features;
let mut remaining_lib_features: FnvHashMap<InternedString, Span> let mut remaining_lib_features: FxHashMap<InternedString, Span>
= declared_lib_features.clone().into_iter().collect(); = declared_lib_features.clone().into_iter().collect();
fn format_stable_since_msg(version: &str) -> String { fn format_stable_since_msg(version: &str) -> String {

View file

@ -17,7 +17,7 @@ use middle::dependency_format;
use session::search_paths::PathKind; use session::search_paths::PathKind;
use session::config::DebugInfoLevel; use session::config::DebugInfoLevel;
use ty::tls; use ty::tls;
use util::nodemap::{NodeMap, FnvHashMap, FnvHashSet}; use util::nodemap::{NodeMap, FxHashMap, FxHashSet};
use util::common::duration_to_secs_str; use util::common::duration_to_secs_str;
use mir::transform as mir_pass; use mir::transform as mir_pass;
@ -78,7 +78,7 @@ pub struct Session {
/// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics /// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics
/// that have been set once, but should not be set again, in order to avoid /// that have been set once, but should not be set again, in order to avoid
/// redundantly verbose output (Issue #24690). /// redundantly verbose output (Issue #24690).
pub one_time_diagnostics: RefCell<FnvHashSet<(lint::LintId, Span, String)>>, pub one_time_diagnostics: RefCell<FxHashSet<(lint::LintId, Span, String)>>,
pub plugin_llvm_passes: RefCell<Vec<String>>, pub plugin_llvm_passes: RefCell<Vec<String>>,
pub mir_passes: RefCell<mir_pass::Passes>, pub mir_passes: RefCell<mir_pass::Passes>,
pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>, pub plugin_attributes: RefCell<Vec<(String, AttributeType)>>,
@ -603,12 +603,12 @@ pub fn build_session_(sopts: config::Options,
working_dir: env::current_dir().unwrap(), working_dir: env::current_dir().unwrap(),
lint_store: RefCell::new(lint::LintStore::new()), lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(NodeMap()), lints: RefCell::new(NodeMap()),
one_time_diagnostics: RefCell::new(FnvHashSet()), one_time_diagnostics: RefCell::new(FxHashSet()),
plugin_llvm_passes: RefCell::new(Vec::new()), plugin_llvm_passes: RefCell::new(Vec::new()),
mir_passes: RefCell::new(mir_pass::Passes::new()), mir_passes: RefCell::new(mir_pass::Passes::new()),
plugin_attributes: RefCell::new(Vec::new()), plugin_attributes: RefCell::new(Vec::new()),
crate_types: RefCell::new(Vec::new()), crate_types: RefCell::new(Vec::new()),
dependency_formats: RefCell::new(FnvHashMap()), dependency_formats: RefCell::new(FxHashMap()),
crate_disambiguator: RefCell::new(token::intern("").as_str()), crate_disambiguator: RefCell::new(token::intern("").as_str()),
features: RefCell::new(feature_gate::Features::new()), features: RefCell::new(feature_gate::Features::new()),
recursion_limit: Cell::new(64), recursion_limit: Cell::new(64),

View file

@ -33,7 +33,7 @@ use ty::error::ExpectedFound;
use ty::fast_reject; use ty::fast_reject;
use ty::fold::TypeFolder; use ty::fold::TypeFolder;
use ty::subst::Subst; use ty::subst::Subst;
use util::nodemap::{FnvHashMap, FnvHashSet}; use util::nodemap::{FxHashMap, FxHashSet};
use std::cmp; use std::cmp;
use std::fmt; use std::fmt;
@ -252,7 +252,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
let generic_map = def.generics.types.iter().map(|param| { let generic_map = def.generics.types.iter().map(|param| {
(param.name.as_str().to_string(), (param.name.as_str().to_string(),
trait_ref.substs.type_for_def(param).to_string()) trait_ref.substs.type_for_def(param).to_string())
}).collect::<FnvHashMap<String, String>>(); }).collect::<FxHashMap<String, String>>();
let parser = Parser::new(&istring); let parser = Parser::new(&istring);
let mut errored = false; let mut errored = false;
let err: String = parser.filter_map(|p| { let err: String = parser.filter_map(|p| {
@ -647,7 +647,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
"the trait `{}` cannot be made into an object", trait_str "the trait `{}` cannot be made into an object", trait_str
)); ));
let mut reported_violations = FnvHashSet(); let mut reported_violations = FxHashSet();
for violation in violations { for violation in violations {
if !reported_violations.insert(violation.clone()) { if !reported_violations.insert(violation.clone()) {
continue; continue;
@ -786,7 +786,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
fn predicate_can_apply(&self, pred: ty::PolyTraitRef<'tcx>) -> bool { fn predicate_can_apply(&self, pred: ty::PolyTraitRef<'tcx>) -> bool {
struct ParamToVarFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { struct ParamToVarFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
var_map: FnvHashMap<Ty<'tcx>, Ty<'tcx>> var_map: FxHashMap<Ty<'tcx>, Ty<'tcx>>
} }
impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ParamToVarFolder<'a, 'gcx, 'tcx> { impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ParamToVarFolder<'a, 'gcx, 'tcx> {
@ -807,7 +807,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
let cleaned_pred = pred.fold_with(&mut ParamToVarFolder { let cleaned_pred = pred.fold_with(&mut ParamToVarFolder {
infcx: self, infcx: self,
var_map: FnvHashMap() var_map: FxHashMap()
}); });
let cleaned_pred = super::project::normalize( let cleaned_pred = super::project::normalize(

View file

@ -18,7 +18,7 @@ use std::marker::PhantomData;
use std::mem; use std::mem;
use syntax::ast; use syntax::ast;
use util::common::ErrorReported; use util::common::ErrorReported;
use util::nodemap::{FnvHashSet, NodeMap}; use util::nodemap::{FxHashSet, NodeMap};
use super::CodeAmbiguity; use super::CodeAmbiguity;
use super::CodeProjectionError; use super::CodeProjectionError;
@ -37,7 +37,7 @@ impl<'tcx> ForestObligation for PendingPredicateObligation<'tcx> {
} }
pub struct GlobalFulfilledPredicates<'tcx> { pub struct GlobalFulfilledPredicates<'tcx> {
set: FnvHashSet<ty::PolyTraitPredicate<'tcx>>, set: FxHashSet<ty::PolyTraitPredicate<'tcx>>,
dep_graph: DepGraph, dep_graph: DepGraph,
} }
@ -673,7 +673,7 @@ fn register_region_obligation<'tcx>(t_a: Ty<'tcx>,
impl<'a, 'gcx, 'tcx> GlobalFulfilledPredicates<'gcx> { impl<'a, 'gcx, 'tcx> GlobalFulfilledPredicates<'gcx> {
pub fn new(dep_graph: DepGraph) -> GlobalFulfilledPredicates<'gcx> { pub fn new(dep_graph: DepGraph) -> GlobalFulfilledPredicates<'gcx> {
GlobalFulfilledPredicates { GlobalFulfilledPredicates {
set: FnvHashSet(), set: FxHashSet(),
dep_graph: dep_graph, dep_graph: dep_graph,
} }
} }

View file

@ -51,7 +51,7 @@ use std::mem;
use std::rc::Rc; use std::rc::Rc;
use syntax::abi::Abi; use syntax::abi::Abi;
use hir; use hir;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
struct InferredObligationsSnapshotVecDelegate<'tcx> { struct InferredObligationsSnapshotVecDelegate<'tcx> {
phantom: PhantomData<&'tcx i32>, phantom: PhantomData<&'tcx i32>,
@ -104,8 +104,8 @@ struct TraitObligationStack<'prev, 'tcx: 'prev> {
#[derive(Clone)] #[derive(Clone)]
pub struct SelectionCache<'tcx> { pub struct SelectionCache<'tcx> {
hashmap: RefCell<FnvHashMap<ty::TraitRef<'tcx>, hashmap: RefCell<FxHashMap<ty::TraitRef<'tcx>,
SelectionResult<'tcx, SelectionCandidate<'tcx>>>>, SelectionResult<'tcx, SelectionCandidate<'tcx>>>>,
} }
pub enum MethodMatchResult { pub enum MethodMatchResult {
@ -306,7 +306,7 @@ enum EvaluationResult {
#[derive(Clone)] #[derive(Clone)]
pub struct EvaluationCache<'tcx> { pub struct EvaluationCache<'tcx> {
hashmap: RefCell<FnvHashMap<ty::PolyTraitRef<'tcx>, EvaluationResult>> hashmap: RefCell<FxHashMap<ty::PolyTraitRef<'tcx>, EvaluationResult>>
} }
impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> {
@ -2937,7 +2937,7 @@ impl<'tcx> TraitObligation<'tcx> {
impl<'tcx> SelectionCache<'tcx> { impl<'tcx> SelectionCache<'tcx> {
pub fn new() -> SelectionCache<'tcx> { pub fn new() -> SelectionCache<'tcx> {
SelectionCache { SelectionCache {
hashmap: RefCell::new(FnvHashMap()) hashmap: RefCell::new(FxHashMap())
} }
} }
} }
@ -2945,7 +2945,7 @@ impl<'tcx> SelectionCache<'tcx> {
impl<'tcx> EvaluationCache<'tcx> { impl<'tcx> EvaluationCache<'tcx> {
pub fn new() -> EvaluationCache<'tcx> { pub fn new() -> EvaluationCache<'tcx> {
EvaluationCache { EvaluationCache {
hashmap: RefCell::new(FnvHashMap()) hashmap: RefCell::new(FxHashMap())
} }
} }
} }

View file

@ -20,7 +20,7 @@
use super::{SelectionContext, FulfillmentContext}; use super::{SelectionContext, FulfillmentContext};
use super::util::impl_trait_ref_and_oblig; use super::util::impl_trait_ref_and_oblig;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use hir::def_id::DefId; use hir::def_id::DefId;
use infer::{InferCtxt, InferOk, TypeOrigin}; use infer::{InferCtxt, InferOk, TypeOrigin};
use middle::region; use middle::region;
@ -270,13 +270,13 @@ fn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
} }
pub struct SpecializesCache { pub struct SpecializesCache {
map: FnvHashMap<(DefId, DefId), bool> map: FxHashMap<(DefId, DefId), bool>
} }
impl SpecializesCache { impl SpecializesCache {
pub fn new() -> Self { pub fn new() -> Self {
SpecializesCache { SpecializesCache {
map: FnvHashMap() map: FxHashMap()
} }
} }

View file

@ -17,7 +17,7 @@ use traits::{self, Reveal};
use ty::{self, TyCtxt, ImplOrTraitItem, TraitDef, TypeFoldable}; use ty::{self, TyCtxt, ImplOrTraitItem, TraitDef, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType}; use ty::fast_reject::{self, SimplifiedType};
use syntax::ast::Name; use syntax::ast::Name;
use util::nodemap::{DefIdMap, FnvHashMap}; use util::nodemap::{DefIdMap, FxHashMap};
/// A per-trait graph of impls in specialization order. At the moment, this /// A per-trait graph of impls in specialization order. At the moment, this
/// graph forms a tree rooted with the trait itself, with all other nodes /// graph forms a tree rooted with the trait itself, with all other nodes
@ -57,7 +57,7 @@ struct Children {
// the specialization graph. // the specialization graph.
/// Impls of the trait. /// Impls of the trait.
nonblanket_impls: FnvHashMap<fast_reject::SimplifiedType, Vec<DefId>>, nonblanket_impls: FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
/// Blanket impls associated with the trait. /// Blanket impls associated with the trait.
blanket_impls: Vec<DefId>, blanket_impls: Vec<DefId>,
@ -78,7 +78,7 @@ enum Inserted {
impl<'a, 'gcx, 'tcx> Children { impl<'a, 'gcx, 'tcx> Children {
fn new() -> Children { fn new() -> Children {
Children { Children {
nonblanket_impls: FnvHashMap(), nonblanket_impls: FxHashMap(),
blanket_impls: vec![], blanket_impls: vec![],
} }
} }

View file

@ -13,7 +13,7 @@ use ty::subst::{Subst, Substs};
use ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef}; use ty::{self, Ty, TyCtxt, ToPredicate, ToPolyTraitRef};
use ty::outlives::Component; use ty::outlives::Component;
use util::common::ErrorReported; use util::common::ErrorReported;
use util::nodemap::FnvHashSet; use util::nodemap::FxHashSet;
use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized}; use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized};
@ -50,12 +50,12 @@ fn anonymize_predicate<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
struct PredicateSet<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { struct PredicateSet<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>,
set: FnvHashSet<ty::Predicate<'tcx>>, set: FxHashSet<ty::Predicate<'tcx>>,
} }
impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> { impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> {
fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PredicateSet<'a, 'gcx, 'tcx> { fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PredicateSet<'a, 'gcx, 'tcx> {
PredicateSet { tcx: tcx, set: FnvHashSet() } PredicateSet { tcx: tcx, set: FxHashSet() }
} }
fn insert(&mut self, pred: &ty::Predicate<'tcx>) -> bool { fn insert(&mut self, pred: &ty::Predicate<'tcx>) -> bool {
@ -272,7 +272,7 @@ pub fn transitive_bounds<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
pub struct SupertraitDefIds<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { pub struct SupertraitDefIds<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>,
stack: Vec<DefId>, stack: Vec<DefId>,
visited: FnvHashSet<DefId>, visited: FxHashSet<DefId>,
} }
pub fn supertrait_def_ids<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>, pub fn supertrait_def_ids<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,

View file

@ -11,7 +11,7 @@
use hir::def_id::{DefId}; use hir::def_id::{DefId};
use ty::{self, Ty, TyCtxt}; use ty::{self, Ty, TyCtxt};
use util::common::MemoizationMap; use util::common::MemoizationMap;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use std::fmt; use std::fmt;
use std::ops; use std::ops;
@ -141,11 +141,11 @@ impl fmt::Debug for TypeContents {
impl<'a, 'tcx> ty::TyS<'tcx> { impl<'a, 'tcx> ty::TyS<'tcx> {
pub fn type_contents(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> TypeContents { pub fn type_contents(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> TypeContents {
return tcx.tc_cache.memoize(self, || tc_ty(tcx, self, &mut FnvHashMap())); return tcx.tc_cache.memoize(self, || tc_ty(tcx, self, &mut FxHashMap()));
fn tc_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn tc_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: Ty<'tcx>, ty: Ty<'tcx>,
cache: &mut FnvHashMap<Ty<'tcx>, TypeContents>) -> TypeContents cache: &mut FxHashMap<Ty<'tcx>, TypeContents>) -> TypeContents
{ {
// Subtle: Note that we are *not* using tcx.tc_cache here but rather a // Subtle: Note that we are *not* using tcx.tc_cache here but rather a
// private cache for this walk. This is needed in the case of cyclic // private cache for this walk. This is needed in the case of cyclic

View file

@ -36,7 +36,7 @@ use ty::layout::{Layout, TargetDataLayout};
use ty::maps; use ty::maps;
use util::common::MemoizationMap; use util::common::MemoizationMap;
use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet}; use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
use util::nodemap::{FnvHashMap, FnvHashSet}; use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec; use rustc_data_structures::accumulate_vec::AccumulateVec;
use arena::TypedArena; use arena::TypedArena;
@ -96,26 +96,26 @@ pub struct CtxtInterners<'tcx> {
/// Specifically use a speedy hash algorithm for these hash sets, /// Specifically use a speedy hash algorithm for these hash sets,
/// they're accessed quite often. /// they're accessed quite often.
type_: RefCell<FnvHashSet<Interned<'tcx, TyS<'tcx>>>>, type_: RefCell<FxHashSet<Interned<'tcx, TyS<'tcx>>>>,
type_list: RefCell<FnvHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>, type_list: RefCell<FxHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
substs: RefCell<FnvHashSet<Interned<'tcx, Substs<'tcx>>>>, substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
bare_fn: RefCell<FnvHashSet<Interned<'tcx, BareFnTy<'tcx>>>>, bare_fn: RefCell<FxHashSet<Interned<'tcx, BareFnTy<'tcx>>>>,
region: RefCell<FnvHashSet<Interned<'tcx, Region>>>, region: RefCell<FxHashSet<Interned<'tcx, Region>>>,
stability: RefCell<FnvHashSet<&'tcx attr::Stability>>, stability: RefCell<FxHashSet<&'tcx attr::Stability>>,
layout: RefCell<FnvHashSet<&'tcx Layout>>, layout: RefCell<FxHashSet<&'tcx Layout>>,
} }
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> { impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
fn new(arenas: &'tcx CtxtArenas<'tcx>) -> CtxtInterners<'tcx> { fn new(arenas: &'tcx CtxtArenas<'tcx>) -> CtxtInterners<'tcx> {
CtxtInterners { CtxtInterners {
arenas: arenas, arenas: arenas,
type_: RefCell::new(FnvHashSet()), type_: RefCell::new(FxHashSet()),
type_list: RefCell::new(FnvHashSet()), type_list: RefCell::new(FxHashSet()),
substs: RefCell::new(FnvHashSet()), substs: RefCell::new(FxHashSet()),
bare_fn: RefCell::new(FnvHashSet()), bare_fn: RefCell::new(FxHashSet()),
region: RefCell::new(FnvHashSet()), region: RefCell::new(FxHashSet()),
stability: RefCell::new(FnvHashSet()), stability: RefCell::new(FxHashSet()),
layout: RefCell::new(FnvHashSet()) layout: RefCell::new(FxHashSet())
} }
} }
@ -244,11 +244,11 @@ pub struct Tables<'tcx> {
impl<'a, 'gcx, 'tcx> Tables<'tcx> { impl<'a, 'gcx, 'tcx> Tables<'tcx> {
pub fn empty() -> Tables<'tcx> { pub fn empty() -> Tables<'tcx> {
Tables { Tables {
node_types: FnvHashMap(), node_types: FxHashMap(),
item_substs: NodeMap(), item_substs: NodeMap(),
adjustments: NodeMap(), adjustments: NodeMap(),
method_map: FnvHashMap(), method_map: FxHashMap(),
upvar_capture_map: FnvHashMap(), upvar_capture_map: FxHashMap(),
closure_tys: DefIdMap(), closure_tys: DefIdMap(),
closure_kinds: DefIdMap(), closure_kinds: DefIdMap(),
liberated_fn_sigs: NodeMap(), liberated_fn_sigs: NodeMap(),
@ -451,16 +451,16 @@ pub struct GlobalCtxt<'tcx> {
pub tcache: RefCell<DepTrackingMap<maps::Tcache<'tcx>>>, pub tcache: RefCell<DepTrackingMap<maps::Tcache<'tcx>>>,
// Internal cache for metadata decoding. No need to track deps on this. // Internal cache for metadata decoding. No need to track deps on this.
pub rcache: RefCell<FnvHashMap<ty::CReaderCacheKey, Ty<'tcx>>>, pub rcache: RefCell<FxHashMap<ty::CReaderCacheKey, Ty<'tcx>>>,
// Cache for the type-contents routine. FIXME -- track deps? // Cache for the type-contents routine. FIXME -- track deps?
pub tc_cache: RefCell<FnvHashMap<Ty<'tcx>, ty::contents::TypeContents>>, pub tc_cache: RefCell<FxHashMap<Ty<'tcx>, ty::contents::TypeContents>>,
// FIXME no dep tracking, but we should be able to remove this // FIXME no dep tracking, but we should be able to remove this
pub ty_param_defs: RefCell<NodeMap<ty::TypeParameterDef<'tcx>>>, pub ty_param_defs: RefCell<NodeMap<ty::TypeParameterDef<'tcx>>>,
// FIXME dep tracking -- should be harmless enough // FIXME dep tracking -- should be harmless enough
pub normalized_cache: RefCell<FnvHashMap<Ty<'tcx>, Ty<'tcx>>>, pub normalized_cache: RefCell<FxHashMap<Ty<'tcx>, Ty<'tcx>>>,
pub lang_items: middle::lang_items::LanguageItems, pub lang_items: middle::lang_items::LanguageItems,
@ -571,7 +571,7 @@ pub struct GlobalCtxt<'tcx> {
pub data_layout: TargetDataLayout, pub data_layout: TargetDataLayout,
/// Cache for layouts computed from types. /// Cache for layouts computed from types.
pub layout_cache: RefCell<FnvHashMap<Ty<'tcx>, &'tcx Layout>>, pub layout_cache: RefCell<FxHashMap<Ty<'tcx>, &'tcx Layout>>,
/// Used to prevent layout from recursing too deeply. /// Used to prevent layout from recursing too deeply.
pub layout_depth: Cell<usize>, pub layout_depth: Cell<usize>,
@ -801,7 +801,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
types: common_types, types: common_types,
named_region_map: named_region_map, named_region_map: named_region_map,
region_maps: region_maps, region_maps: region_maps,
free_region_maps: RefCell::new(FnvHashMap()), free_region_maps: RefCell::new(FxHashMap()),
item_variance_map: RefCell::new(DepTrackingMap::new(dep_graph.clone())), item_variance_map: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
variance_computed: Cell::new(false), variance_computed: Cell::new(false),
sess: s, sess: s,
@ -820,13 +820,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
freevars: RefCell::new(freevars), freevars: RefCell::new(freevars),
maybe_unused_trait_imports: maybe_unused_trait_imports, maybe_unused_trait_imports: maybe_unused_trait_imports,
tcache: RefCell::new(DepTrackingMap::new(dep_graph.clone())), tcache: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
rcache: RefCell::new(FnvHashMap()), rcache: RefCell::new(FxHashMap()),
tc_cache: RefCell::new(FnvHashMap()), tc_cache: RefCell::new(FxHashMap()),
impl_or_trait_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())), impl_or_trait_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
impl_or_trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())), impl_or_trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
trait_items_cache: RefCell::new(DepTrackingMap::new(dep_graph.clone())), trait_items_cache: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
ty_param_defs: RefCell::new(NodeMap()), ty_param_defs: RefCell::new(NodeMap()),
normalized_cache: RefCell::new(FnvHashMap()), normalized_cache: RefCell::new(FxHashMap()),
lang_items: lang_items, lang_items: lang_items,
inherent_impls: RefCell::new(DepTrackingMap::new(dep_graph.clone())), inherent_impls: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
used_unsafe: RefCell::new(NodeSet()), used_unsafe: RefCell::new(NodeSet()),
@ -846,7 +846,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
fragment_infos: RefCell::new(DefIdMap()), fragment_infos: RefCell::new(DefIdMap()),
crate_name: token::intern_and_get_ident(crate_name), crate_name: token::intern_and_get_ident(crate_name),
data_layout: data_layout, data_layout: data_layout,
layout_cache: RefCell::new(FnvHashMap()), layout_cache: RefCell::new(FxHashMap()),
layout_depth: Cell::new(0), layout_depth: Cell::new(0),
derive_macros: RefCell::new(NodeMap()), derive_macros: RefCell::new(NodeMap()),
}, f) }, f)

View file

@ -45,7 +45,7 @@ use ty::adjustment;
use ty::{self, Binder, Ty, TyCtxt, TypeFlags}; use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
use std::fmt; use std::fmt;
use util::nodemap::{FnvHashMap, FnvHashSet}; use util::nodemap::{FxHashMap, FxHashSet};
/// The TypeFoldable trait is implemented for every type that can be folded. /// The TypeFoldable trait is implemented for every type that can be folded.
/// Basically, every type that has a corresponding method in TypeFolder. /// Basically, every type that has a corresponding method in TypeFolder.
@ -225,7 +225,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
/// whether any late-bound regions were skipped /// whether any late-bound regions were skipped
pub fn collect_regions<T>(self, pub fn collect_regions<T>(self,
value: &T, value: &T,
region_set: &mut FnvHashSet<&'tcx ty::Region>) region_set: &mut FxHashSet<&'tcx ty::Region>)
-> bool -> bool
where T : TypeFoldable<'tcx> where T : TypeFoldable<'tcx>
{ {
@ -319,14 +319,14 @@ struct RegionReplacer<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>,
current_depth: u32, current_depth: u32,
fld_r: &'a mut (FnMut(ty::BoundRegion) -> &'tcx ty::Region + 'a), fld_r: &'a mut (FnMut(ty::BoundRegion) -> &'tcx ty::Region + 'a),
map: FnvHashMap<ty::BoundRegion, &'tcx ty::Region> map: FxHashMap<ty::BoundRegion, &'tcx ty::Region>
} }
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn replace_late_bound_regions<T,F>(self, pub fn replace_late_bound_regions<T,F>(self,
value: &Binder<T>, value: &Binder<T>,
mut f: F) mut f: F)
-> (T, FnvHashMap<ty::BoundRegion, &'tcx ty::Region>) -> (T, FxHashMap<ty::BoundRegion, &'tcx ty::Region>)
where F : FnMut(ty::BoundRegion) -> &'tcx ty::Region, where F : FnMut(ty::BoundRegion) -> &'tcx ty::Region,
T : TypeFoldable<'tcx>, T : TypeFoldable<'tcx>,
{ {
@ -390,7 +390,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
/// variables and equate `value` with something else, those /// variables and equate `value` with something else, those
/// variables will also be equated. /// variables will also be equated.
pub fn collect_constrained_late_bound_regions<T>(&self, value: &Binder<T>) pub fn collect_constrained_late_bound_regions<T>(&self, value: &Binder<T>)
-> FnvHashSet<ty::BoundRegion> -> FxHashSet<ty::BoundRegion>
where T : TypeFoldable<'tcx> where T : TypeFoldable<'tcx>
{ {
self.collect_late_bound_regions(value, true) self.collect_late_bound_regions(value, true)
@ -398,14 +398,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
/// Returns a set of all late-bound regions that appear in `value` anywhere. /// Returns a set of all late-bound regions that appear in `value` anywhere.
pub fn collect_referenced_late_bound_regions<T>(&self, value: &Binder<T>) pub fn collect_referenced_late_bound_regions<T>(&self, value: &Binder<T>)
-> FnvHashSet<ty::BoundRegion> -> FxHashSet<ty::BoundRegion>
where T : TypeFoldable<'tcx> where T : TypeFoldable<'tcx>
{ {
self.collect_late_bound_regions(value, false) self.collect_late_bound_regions(value, false)
} }
fn collect_late_bound_regions<T>(&self, value: &Binder<T>, just_constraint: bool) fn collect_late_bound_regions<T>(&self, value: &Binder<T>, just_constraint: bool)
-> FnvHashSet<ty::BoundRegion> -> FxHashSet<ty::BoundRegion>
where T : TypeFoldable<'tcx> where T : TypeFoldable<'tcx>
{ {
let mut collector = LateBoundRegionsCollector::new(just_constraint); let mut collector = LateBoundRegionsCollector::new(just_constraint);
@ -450,7 +450,7 @@ impl<'a, 'gcx, 'tcx> RegionReplacer<'a, 'gcx, 'tcx> {
tcx: tcx, tcx: tcx,
current_depth: 1, current_depth: 1,
fld_r: fld_r, fld_r: fld_r,
map: FnvHashMap() map: FxHashMap()
} }
} }
} }
@ -650,7 +650,7 @@ impl<'tcx> TypeVisitor<'tcx> for HasTypeFlagsVisitor {
/// Collects all the late-bound regions it finds into a hash set. /// Collects all the late-bound regions it finds into a hash set.
struct LateBoundRegionsCollector { struct LateBoundRegionsCollector {
current_depth: u32, current_depth: u32,
regions: FnvHashSet<ty::BoundRegion>, regions: FxHashSet<ty::BoundRegion>,
just_constrained: bool, just_constrained: bool,
} }
@ -658,7 +658,7 @@ impl LateBoundRegionsCollector {
fn new(just_constrained: bool) -> Self { fn new(just_constrained: bool) -> Self {
LateBoundRegionsCollector { LateBoundRegionsCollector {
current_depth: 1, current_depth: 1,
regions: FnvHashSet(), regions: FxHashSet(),
just_constrained: just_constrained, just_constrained: just_constrained,
} }
} }

View file

@ -31,7 +31,7 @@ use ty::subst::{Subst, Substs};
use ty::walk::TypeWalker; use ty::walk::TypeWalker;
use util::common::MemoizationMap; use util::common::MemoizationMap;
use util::nodemap::NodeSet; use util::nodemap::NodeSet;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use serialize::{self, Encodable, Encoder}; use serialize::{self, Encodable, Encoder};
use std::borrow::Cow; use std::borrow::Cow;
@ -418,7 +418,7 @@ impl MethodCall {
// maps from an expression id that corresponds to a method call to the details // maps from an expression id that corresponds to a method call to the details
// of the method to be invoked // of the method to be invoked
pub type MethodMap<'tcx> = FnvHashMap<MethodCall, MethodCallee<'tcx>>; pub type MethodMap<'tcx> = FxHashMap<MethodCall, MethodCallee<'tcx>>;
// Contains information needed to resolve types and (in the future) look up // Contains information needed to resolve types and (in the future) look up
// the types of AST nodes. // the types of AST nodes.
@ -650,7 +650,7 @@ pub struct UpvarBorrow<'tcx> {
pub region: &'tcx ty::Region, pub region: &'tcx ty::Region,
} }
pub type UpvarCaptureMap<'tcx> = FnvHashMap<UpvarId, UpvarCapture<'tcx>>; pub type UpvarCaptureMap<'tcx> = FxHashMap<UpvarId, UpvarCapture<'tcx>>;
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
pub struct ClosureUpvar<'tcx> { pub struct ClosureUpvar<'tcx> {
@ -1251,10 +1251,10 @@ pub struct ParameterEnvironment<'tcx> {
pub free_id_outlive: CodeExtent, pub free_id_outlive: CodeExtent,
/// A cache for `moves_by_default`. /// A cache for `moves_by_default`.
pub is_copy_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>, pub is_copy_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
/// A cache for `type_is_sized` /// A cache for `type_is_sized`
pub is_sized_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>, pub is_sized_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
} }
impl<'a, 'tcx> ParameterEnvironment<'tcx> { impl<'a, 'tcx> ParameterEnvironment<'tcx> {
@ -1267,8 +1267,8 @@ impl<'a, 'tcx> ParameterEnvironment<'tcx> {
implicit_region_bound: self.implicit_region_bound, implicit_region_bound: self.implicit_region_bound,
caller_bounds: caller_bounds, caller_bounds: caller_bounds,
free_id_outlive: self.free_id_outlive, free_id_outlive: self.free_id_outlive,
is_copy_cache: RefCell::new(FnvHashMap()), is_copy_cache: RefCell::new(FxHashMap()),
is_sized_cache: RefCell::new(FnvHashMap()), is_sized_cache: RefCell::new(FxHashMap()),
} }
} }
@ -2752,8 +2752,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
caller_bounds: Vec::new(), caller_bounds: Vec::new(),
implicit_region_bound: self.mk_region(ty::ReEmpty), implicit_region_bound: self.mk_region(ty::ReEmpty),
free_id_outlive: free_id_outlive, free_id_outlive: free_id_outlive,
is_copy_cache: RefCell::new(FnvHashMap()), is_copy_cache: RefCell::new(FxHashMap()),
is_sized_cache: RefCell::new(FnvHashMap()), is_sized_cache: RefCell::new(FxHashMap()),
} }
} }
@ -2824,8 +2824,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
implicit_region_bound: tcx.mk_region(ty::ReScope(free_id_outlive)), implicit_region_bound: tcx.mk_region(ty::ReScope(free_id_outlive)),
caller_bounds: predicates, caller_bounds: predicates,
free_id_outlive: free_id_outlive, free_id_outlive: free_id_outlive,
is_copy_cache: RefCell::new(FnvHashMap()), is_copy_cache: RefCell::new(FxHashMap()),
is_sized_cache: RefCell::new(FnvHashMap()), is_sized_cache: RefCell::new(FxHashMap()),
}; };
let cause = traits::ObligationCause::misc(span, free_id_outlive.node_id(&self.region_maps)); let cause = traits::ObligationCause::misc(span, free_id_outlive.node_id(&self.region_maps));

View file

@ -16,7 +16,7 @@ use ty::fast_reject;
use ty::{Ty, TyCtxt, TraitRef}; use ty::{Ty, TyCtxt, TraitRef};
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
use hir; use hir;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
/// As `TypeScheme` but for a trait ref. /// As `TypeScheme` but for a trait ref.
pub struct TraitDef<'tcx> { pub struct TraitDef<'tcx> {
@ -55,7 +55,7 @@ pub struct TraitDef<'tcx> {
/// Impls of the trait. /// Impls of the trait.
nonblanket_impls: RefCell< nonblanket_impls: RefCell<
FnvHashMap<fast_reject::SimplifiedType, Vec<DefId>> FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>
>, >,
/// Blanket impls associated with the trait. /// Blanket impls associated with the trait.
@ -84,7 +84,7 @@ impl<'a, 'gcx, 'tcx> TraitDef<'tcx> {
unsafety: unsafety, unsafety: unsafety,
generics: generics, generics: generics,
trait_ref: trait_ref, trait_ref: trait_ref,
nonblanket_impls: RefCell::new(FnvHashMap()), nonblanket_impls: RefCell::new(FxHashMap()),
blanket_impls: RefCell::new(vec![]), blanket_impls: RefCell::new(vec![]),
flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS), flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS),
specialization_graph: RefCell::new(traits::specialization_graph::Graph::new()), specialization_graph: RefCell::new(traits::specialization_graph::Graph::new()),

View file

@ -20,7 +20,7 @@ use ty::{Disr, ParameterEnvironment};
use ty::fold::TypeVisitor; use ty::fold::TypeVisitor;
use ty::layout::{Layout, LayoutError}; use ty::layout::{Layout, LayoutError};
use ty::TypeVariants::*; use ty::TypeVariants::*;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use rustc_const_math::{ConstInt, ConstIsize, ConstUsize}; use rustc_const_math::{ConstInt, ConstIsize, ConstUsize};
@ -594,7 +594,7 @@ impl<'a, 'tcx> ty::TyS<'tcx> {
fn impls_bound(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, fn impls_bound(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: &ParameterEnvironment<'tcx>, param_env: &ParameterEnvironment<'tcx>,
bound: ty::BuiltinBound, bound: ty::BuiltinBound,
cache: &RefCell<FnvHashMap<Ty<'tcx>, bool>>, cache: &RefCell<FxHashMap<Ty<'tcx>, bool>>,
span: Span) -> bool span: Span) -> bool
{ {
if self.has_param_types() || self.has_self_ty() { if self.has_param_types() || self.has_self_ty() {

View file

@ -15,17 +15,17 @@
use hir::def_id::DefId; use hir::def_id::DefId;
use syntax::ast; use syntax::ast;
pub use rustc_data_structures::fnv::FnvHashMap; pub use rustc_data_structures::fx::FxHashMap;
pub use rustc_data_structures::fnv::FnvHashSet; pub use rustc_data_structures::fx::FxHashSet;
pub type NodeMap<T> = FnvHashMap<ast::NodeId, T>; pub type NodeMap<T> = FxHashMap<ast::NodeId, T>;
pub type DefIdMap<T> = FnvHashMap<DefId, T>; pub type DefIdMap<T> = FxHashMap<DefId, T>;
pub type NodeSet = FnvHashSet<ast::NodeId>; pub type NodeSet = FxHashSet<ast::NodeId>;
pub type DefIdSet = FnvHashSet<DefId>; pub type DefIdSet = FxHashSet<DefId>;
pub fn NodeMap<T>() -> NodeMap<T> { FnvHashMap() } pub fn NodeMap<T>() -> NodeMap<T> { FxHashMap() }
pub fn DefIdMap<T>() -> DefIdMap<T> { FnvHashMap() } pub fn DefIdMap<T>() -> DefIdMap<T> { FxHashMap() }
pub fn NodeSet() -> NodeSet { FnvHashSet() } pub fn NodeSet() -> NodeSet { FxHashSet() }
pub fn DefIdSet() -> DefIdSet { FnvHashSet() } pub fn DefIdSet() -> DefIdSet { FxHashSet() }

View file

@ -21,7 +21,7 @@ use rustc::mir::*;
use rustc::mir::transform::{Pass, MirPass, MirSource}; use rustc::mir::transform::{Pass, MirPass, MirSource};
use rustc::middle::const_val::ConstVal; use rustc::middle::const_val::ConstVal;
use rustc::middle::lang_items; use rustc::middle::lang_items;
use rustc::util::nodemap::FnvHashMap; use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_set::IdxSetBuf; use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::indexed_vec::Idx;
use syntax_pos::Span; use syntax_pos::Span;
@ -63,7 +63,7 @@ impl<'tcx> MirPass<'tcx> for ElaborateDrops {
env: &env, env: &env,
flow_inits: flow_inits, flow_inits: flow_inits,
flow_uninits: flow_uninits, flow_uninits: flow_uninits,
drop_flags: FnvHashMap(), drop_flags: FxHashMap(),
patch: MirPatch::new(mir), patch: MirPatch::new(mir),
}.elaborate() }.elaborate()
}; };
@ -118,7 +118,7 @@ struct ElaborateDropsCtxt<'a, 'tcx: 'a> {
env: &'a MoveDataParamEnv<'tcx>, env: &'a MoveDataParamEnv<'tcx>,
flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>, flow_inits: DataflowResults<MaybeInitializedLvals<'a, 'tcx>>,
flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>, flow_uninits: DataflowResults<MaybeUninitializedLvals<'a, 'tcx>>,
drop_flags: FnvHashMap<MovePathIndex, Local>, drop_flags: FxHashMap<MovePathIndex, Local>,
patch: MirPatch<'tcx>, patch: MirPatch<'tcx>,
} }

View file

@ -11,7 +11,7 @@
use rustc::ty::{self, TyCtxt, ParameterEnvironment}; use rustc::ty::{self, TyCtxt, ParameterEnvironment};
use rustc::mir::*; use rustc::mir::*;
use rustc::util::nodemap::FnvHashMap; use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_vec::{IndexVec}; use rustc_data_structures::indexed_vec::{IndexVec};
use syntax::codemap::DUMMY_SP; use syntax::codemap::DUMMY_SP;
@ -181,7 +181,7 @@ pub struct MovePathLookup<'tcx> {
/// subsequent search so that it is solely relative to that /// subsequent search so that it is solely relative to that
/// base-lvalue). For the remaining lookup, we map the projection /// base-lvalue). For the remaining lookup, we map the projection
/// elem to the associated MovePathIndex. /// elem to the associated MovePathIndex.
projections: FnvHashMap<(MovePathIndex, AbstractElem<'tcx>), MovePathIndex> projections: FxHashMap<(MovePathIndex, AbstractElem<'tcx>), MovePathIndex>
} }
struct MoveDataBuilder<'a, 'tcx: 'a> { struct MoveDataBuilder<'a, 'tcx: 'a> {
@ -215,7 +215,7 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> {
locals: mir.local_decls.indices().map(Lvalue::Local).map(|v| { locals: mir.local_decls.indices().map(Lvalue::Local).map(|v| {
Self::new_move_path(&mut move_paths, &mut path_map, None, v) Self::new_move_path(&mut move_paths, &mut path_map, None, v)
}).collect(), }).collect(),
projections: FnvHashMap(), projections: FxHashMap(),
}, },
move_paths: move_paths, move_paths: move_paths,
path_map: path_map, path_map: path_map,

View file

@ -23,7 +23,7 @@ use rustc::middle::expr_use_visitor as euv;
use rustc::middle::expr_use_visitor::MutateMode; use rustc::middle::expr_use_visitor::MutateMode;
use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization as mc;
use rustc::ty::{self, TyCtxt}; use rustc::ty::{self, TyCtxt};
use rustc::util::nodemap::{FnvHashMap, NodeSet}; use rustc::util::nodemap::{FxHashMap, NodeSet};
use std::cell::RefCell; use std::cell::RefCell;
use std::rc::Rc; use std::rc::Rc;
@ -41,7 +41,7 @@ pub struct MoveData<'tcx> {
pub paths: RefCell<Vec<MovePath<'tcx>>>, pub paths: RefCell<Vec<MovePath<'tcx>>>,
/// Cache of loan path to move path index, for easy lookup. /// Cache of loan path to move path index, for easy lookup.
pub path_map: RefCell<FnvHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>, pub path_map: RefCell<FxHashMap<Rc<LoanPath<'tcx>>, MovePathIndex>>,
/// Each move or uninitialized variable gets an entry here. /// Each move or uninitialized variable gets an entry here.
pub moves: RefCell<Vec<Move>>, pub moves: RefCell<Vec<Move>>,
@ -217,7 +217,7 @@ impl<'a, 'tcx> MoveData<'tcx> {
pub fn new() -> MoveData<'tcx> { pub fn new() -> MoveData<'tcx> {
MoveData { MoveData {
paths: RefCell::new(Vec::new()), paths: RefCell::new(Vec::new()),
path_map: RefCell::new(FnvHashMap()), path_map: RefCell::new(FxHashMap()),
moves: RefCell::new(Vec::new()), moves: RefCell::new(Vec::new()),
path_assignments: RefCell::new(Vec::new()), path_assignments: RefCell::new(Vec::new()),
var_assignments: RefCell::new(Vec::new()), var_assignments: RefCell::new(Vec::new()),

View file

@ -17,7 +17,7 @@ use eval::{compare_const_vals};
use rustc_const_math::ConstInt; use rustc_const_math::ConstInt;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::indexed_vec::Idx;
use pattern::{FieldPattern, Pattern, PatternKind}; use pattern::{FieldPattern, Pattern, PatternKind};
@ -160,7 +160,7 @@ pub struct MatchCheckCtxt<'a, 'tcx: 'a> {
/// associated types to get field types. /// associated types to get field types.
pub wild_pattern: &'a Pattern<'tcx>, pub wild_pattern: &'a Pattern<'tcx>,
pub pattern_arena: &'a TypedArena<Pattern<'tcx>>, pub pattern_arena: &'a TypedArena<Pattern<'tcx>>,
pub byte_array_map: FnvHashMap<*const Pattern<'tcx>, Vec<&'a Pattern<'tcx>>>, pub byte_array_map: FxHashMap<*const Pattern<'tcx>, Vec<&'a Pattern<'tcx>>>,
} }
impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
@ -181,7 +181,7 @@ impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> {
tcx: tcx, tcx: tcx,
wild_pattern: &wild_pattern, wild_pattern: &wild_pattern,
pattern_arena: &pattern_arena, pattern_arena: &pattern_arena,
byte_array_map: FnvHashMap(), byte_array_map: FxHashMap(),
}) })
} }

View file

@ -0,0 +1,115 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::{HashMap, HashSet};
use std::default::Default;
use std::hash::{Hasher, Hash, BuildHasherDefault};
use std::ops::BitXor;
pub type FxHashMap<K, V> = HashMap<K, V, BuildHasherDefault<FxHasher>>;
pub type FxHashSet<V> = HashSet<V, BuildHasherDefault<FxHasher>>;

/// Convenience constructor: an empty `FxHashMap`, like `HashMap::default()`.
#[allow(non_snake_case)]
pub fn FxHashMap<K: Hash + Eq, V>() -> FxHashMap<K, V> {
    HashMap::with_hasher(Default::default())
}

/// Convenience constructor: an empty `FxHashSet`, like `HashSet::default()`.
#[allow(non_snake_case)]
pub fn FxHashSet<V: Hash + Eq>() -> FxHashSet<V> {
    HashSet::with_hasher(Default::default())
}

/// A speedy, non-cryptographic hash for use within rustc. The libcollections
/// hashmap defaults to SipHash, whose DoS resistance the compiler does not
/// need and whose speed it cannot afford.
///
/// The mixing scheme is the one Firefox uses -- a homespun design not based
/// on any widely-known algorithm -- widened here to emit hash values of the
/// platform word size rather than 32 bits. It consistently beats an FNV-based
/// hash inside rustc: collisions are similar or slightly worse than FNV, but
/// the fixed-width integer paths consume up to 8 bytes per mixing step, so
/// the function itself is much cheaper to evaluate.
pub struct FxHasher {
    hash: usize
}

#[cfg(target_pointer_width = "32")]
const K: usize = 0x9e3779b9;
#[cfg(target_pointer_width = "64")]
const K: usize = 0x517cc1b727220a95;

impl Default for FxHasher {
    /// A fresh hasher starts from an all-zero state.
    #[inline]
    fn default() -> FxHasher {
        FxHasher { hash: 0 }
    }
}

impl FxHasher {
    /// Folds one machine word into the state: rotate the accumulator, mix in
    /// the new word with xor, then scramble the bits with a multiply by `K`.
    #[inline]
    fn add_to_hash(&mut self, i: usize) {
        self.hash = (self.hash.rotate_left(5) ^ i).wrapping_mul(K);
    }
}

impl Hasher for FxHasher {
    /// Variable-length input is consumed one byte per mixing step.
    #[inline]
    fn write(&mut self, bytes: &[u8]) {
        for &byte in bytes {
            self.add_to_hash(usize::from(byte));
        }
    }

    #[inline]
    fn write_u8(&mut self, i: u8) {
        self.add_to_hash(i as usize);
    }

    #[inline]
    fn write_u16(&mut self, i: u16) {
        self.add_to_hash(i as usize);
    }

    #[inline]
    fn write_u32(&mut self, i: u32) {
        self.add_to_hash(i as usize);
    }

    /// On 32-bit targets a `u64` is folded in as two machine words,
    /// low half first.
    #[cfg(target_pointer_width = "32")]
    #[inline]
    fn write_u64(&mut self, i: u64) {
        self.add_to_hash(i as usize);
        self.add_to_hash((i >> 32) as usize);
    }

    #[cfg(target_pointer_width = "64")]
    #[inline]
    fn write_u64(&mut self, i: u64) {
        self.add_to_hash(i as usize);
    }

    #[inline]
    fn write_usize(&mut self, i: usize) {
        self.add_to_hash(i);
    }

    #[inline]
    fn finish(&self) -> u64 {
        self.hash as u64
    }
}

/// One-shot helper: hash `v` with a fresh `FxHasher` and return the result.
pub fn hash<T: Hash>(v: &T) -> u64 {
    let mut hasher = FxHasher::default();
    v.hash(&mut hasher);
    hasher.finish()
}

View file

@ -60,6 +60,7 @@ pub mod snapshot_vec;
pub mod transitive_relation; pub mod transitive_relation;
pub mod unify; pub mod unify;
pub mod fnv; pub mod fnv;
pub mod fx;
pub mod tuple_slice; pub mod tuple_slice;
pub mod veccell; pub mod veccell;
pub mod control_flow_graph; pub mod control_flow_graph;

View file

@ -15,7 +15,7 @@
//! in the first place). See README.md for a general overview of how //! in the first place). See README.md for a general overview of how
//! to use this class. //! to use this class.
use fnv::{FnvHashMap, FnvHashSet}; use fx::{FxHashMap, FxHashSet};
use std::cell::Cell; use std::cell::Cell;
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
@ -68,9 +68,9 @@ pub struct ObligationForest<O: ForestObligation> {
/// backtrace iterator (which uses `split_at`). /// backtrace iterator (which uses `split_at`).
nodes: Vec<Node<O>>, nodes: Vec<Node<O>>,
/// A cache of predicates that have been successfully completed. /// A cache of predicates that have been successfully completed.
done_cache: FnvHashSet<O::Predicate>, done_cache: FxHashSet<O::Predicate>,
/// An cache of the nodes in `nodes`, indexed by predicate. /// An cache of the nodes in `nodes`, indexed by predicate.
waiting_cache: FnvHashMap<O::Predicate, NodeIndex>, waiting_cache: FxHashMap<O::Predicate, NodeIndex>,
/// A list of the obligations added in snapshots, to allow /// A list of the obligations added in snapshots, to allow
/// for their removal. /// for their removal.
cache_list: Vec<O::Predicate>, cache_list: Vec<O::Predicate>,
@ -158,8 +158,8 @@ impl<O: ForestObligation> ObligationForest<O> {
ObligationForest { ObligationForest {
nodes: vec![], nodes: vec![],
snapshots: vec![], snapshots: vec![],
done_cache: FnvHashSet(), done_cache: FxHashSet(),
waiting_cache: FnvHashMap(), waiting_cache: FxHashMap(),
cache_list: vec![], cache_list: vec![],
scratch: Some(vec![]), scratch: Some(vec![]),
} }

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use fnv::FnvHashMap; use fx::FxHashMap;
use std::hash::Hash; use std::hash::Hash;
use std::ops; use std::ops;
use std::mem; use std::mem;
@ -19,7 +19,7 @@ mod test;
pub struct SnapshotMap<K, V> pub struct SnapshotMap<K, V>
where K: Hash + Clone + Eq where K: Hash + Clone + Eq
{ {
map: FnvHashMap<K, V>, map: FxHashMap<K, V>,
undo_log: Vec<UndoLog<K, V>>, undo_log: Vec<UndoLog<K, V>>,
} }
@ -40,7 +40,7 @@ impl<K, V> SnapshotMap<K, V>
{ {
pub fn new() -> Self { pub fn new() -> Self {
SnapshotMap { SnapshotMap {
map: FnvHashMap(), map: FxHashMap(),
undo_log: vec![], undo_log: vec![],
} }
} }

View file

@ -48,7 +48,7 @@ use rustc::dep_graph::{DepGraphQuery, DepNode};
use rustc::dep_graph::debug::{DepNodeFilter, EdgeFilter}; use rustc::dep_graph::debug::{DepNodeFilter, EdgeFilter};
use rustc::hir::def_id::DefId; use rustc::hir::def_id::DefId;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashSet; use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::graph::{Direction, INCOMING, OUTGOING, NodeIndex}; use rustc_data_structures::graph::{Direction, INCOMING, OUTGOING, NodeIndex};
use rustc::hir; use rustc::hir;
use rustc::hir::intravisit::Visitor; use rustc::hir::intravisit::Visitor;
@ -244,7 +244,7 @@ fn dump_graph(tcx: TyCtxt) {
} }
} }
pub struct GraphvizDepGraph<'q>(FnvHashSet<&'q DepNode<DefId>>, pub struct GraphvizDepGraph<'q>(FxHashSet<&'q DepNode<DefId>>,
Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>); Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>);
impl<'a, 'tcx, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> { impl<'a, 'tcx, 'q> dot::GraphWalk<'a> for GraphvizDepGraph<'q> {
@ -288,7 +288,7 @@ impl<'a, 'tcx, 'q> dot::Labeller<'a> for GraphvizDepGraph<'q> {
// filter) or the set of nodes whose labels contain all of those // filter) or the set of nodes whose labels contain all of those
// substrings. // substrings.
fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter) fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
-> Option<FnvHashSet<&'q DepNode<DefId>>> -> Option<FxHashSet<&'q DepNode<DefId>>>
{ {
debug!("node_set(filter={:?})", filter); debug!("node_set(filter={:?})", filter);
@ -300,9 +300,9 @@ fn node_set<'q>(query: &'q DepGraphQuery<DefId>, filter: &DepNodeFilter)
} }
fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>, fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
sources: &Option<FnvHashSet<&'q DepNode<DefId>>>, sources: &Option<FxHashSet<&'q DepNode<DefId>>>,
targets: &Option<FnvHashSet<&'q DepNode<DefId>>>) targets: &Option<FxHashSet<&'q DepNode<DefId>>>)
-> FnvHashSet<&'q DepNode<DefId>> -> FxHashSet<&'q DepNode<DefId>>
{ {
if let &Some(ref sources) = sources { if let &Some(ref sources) = sources {
if let &Some(ref targets) = targets { if let &Some(ref targets) = targets {
@ -318,11 +318,11 @@ fn filter_nodes<'q>(query: &'q DepGraphQuery<DefId>,
} }
fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>, fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
starts: &FnvHashSet<&'q DepNode<DefId>>, starts: &FxHashSet<&'q DepNode<DefId>>,
direction: Direction) direction: Direction)
-> FnvHashSet<&'q DepNode<DefId>> -> FxHashSet<&'q DepNode<DefId>>
{ {
let mut set = FnvHashSet(); let mut set = FxHashSet();
for &start in starts { for &start in starts {
debug!("walk_nodes: start={:?} outgoing?={:?}", start, direction == OUTGOING); debug!("walk_nodes: start={:?} outgoing?={:?}", start, direction == OUTGOING);
if set.insert(start) { if set.insert(start) {
@ -342,9 +342,9 @@ fn walk_nodes<'q>(query: &'q DepGraphQuery<DefId>,
} }
fn walk_between<'q>(query: &'q DepGraphQuery<DefId>, fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
sources: &FnvHashSet<&'q DepNode<DefId>>, sources: &FxHashSet<&'q DepNode<DefId>>,
targets: &FnvHashSet<&'q DepNode<DefId>>) targets: &FxHashSet<&'q DepNode<DefId>>)
-> FnvHashSet<&'q DepNode<DefId>> -> FxHashSet<&'q DepNode<DefId>>
{ {
// This is a bit tricky. We want to include a node only if it is: // This is a bit tricky. We want to include a node only if it is:
// (a) reachable from a source and (b) will reach a target. And we // (a) reachable from a source and (b) will reach a target. And we
@ -410,7 +410,7 @@ fn walk_between<'q>(query: &'q DepGraphQuery<DefId>,
} }
fn filter_edges<'q>(query: &'q DepGraphQuery<DefId>, fn filter_edges<'q>(query: &'q DepGraphQuery<DefId>,
nodes: &FnvHashSet<&'q DepNode<DefId>>) nodes: &FxHashSet<&'q DepNode<DefId>>)
-> Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)> -> Vec<(&'q DepNode<DefId>, &'q DepNode<DefId>)>
{ {
query.edges() query.edges()

View file

@ -35,7 +35,7 @@ use rustc::hir;
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::hir::intravisit as visit; use rustc::hir::intravisit as visit;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc::util::common::record_time; use rustc::util::common::record_time;
use rustc::session::config::DebugInfoLevel::NoDebugInfo; use rustc::session::config::DebugInfoLevel::NoDebugInfo;
@ -51,21 +51,21 @@ mod caching_codemap_view;
pub mod hasher; pub mod hasher;
pub struct IncrementalHashesMap { pub struct IncrementalHashesMap {
hashes: FnvHashMap<DepNode<DefId>, Fingerprint>, hashes: FxHashMap<DepNode<DefId>, Fingerprint>,
// These are the metadata hashes for the current crate as they were stored // These are the metadata hashes for the current crate as they were stored
// during the last compilation session. They are only loaded if // during the last compilation session. They are only loaded if
// -Z query-dep-graph was specified and are needed for auto-tests using // -Z query-dep-graph was specified and are needed for auto-tests using
// the #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes to // the #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes to
// check whether some metadata hash has changed in between two revisions. // check whether some metadata hash has changed in between two revisions.
pub prev_metadata_hashes: RefCell<FnvHashMap<DefId, Fingerprint>>, pub prev_metadata_hashes: RefCell<FxHashMap<DefId, Fingerprint>>,
} }
impl IncrementalHashesMap { impl IncrementalHashesMap {
pub fn new() -> IncrementalHashesMap { pub fn new() -> IncrementalHashesMap {
IncrementalHashesMap { IncrementalHashesMap {
hashes: FnvHashMap(), hashes: FxHashMap(),
prev_metadata_hashes: RefCell::new(FnvHashMap()), prev_metadata_hashes: RefCell::new(FxHashMap()),
} }
} }

View file

@ -13,7 +13,7 @@
use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId}; use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId};
use rustc::hir::def_id::DefIndex; use rustc::hir::def_id::DefIndex;
use std::sync::Arc; use std::sync::Arc;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use ich::Fingerprint; use ich::Fingerprint;
use super::directory::DefPathIndex; use super::directory::DefPathIndex;
@ -106,7 +106,7 @@ pub struct SerializedMetadataHashes {
/// is only populated if -Z query-dep-graph is specified. It will be /// is only populated if -Z query-dep-graph is specified. It will be
/// empty otherwise. Importing crates are perfectly happy with just having /// empty otherwise. Importing crates are perfectly happy with just having
/// the DefIndex. /// the DefIndex.
pub index_map: FnvHashMap<DefIndex, DefPathIndex> pub index_map: FxHashMap<DefIndex, DefPathIndex>
} }
/// The hash for some metadata that (when saving) will be exported /// The hash for some metadata that (when saving) will be exported

View file

@ -47,7 +47,7 @@ use rustc::hir;
use rustc::hir::def_id::DefId; use rustc::hir::def_id::DefId;
use rustc::hir::intravisit::Visitor; use rustc::hir::intravisit::Visitor;
use syntax::ast::{self, Attribute, NestedMetaItem}; use syntax::ast::{self, Attribute, NestedMetaItem};
use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap}; use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use syntax::parse::token::InternedString; use syntax::parse::token::InternedString;
use syntax_pos::Span; use syntax_pos::Span;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
@ -67,7 +67,7 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
} }
let _ignore = tcx.dep_graph.in_ignore(); let _ignore = tcx.dep_graph.in_ignore();
let dirty_inputs: FnvHashSet<DepNode<DefId>> = let dirty_inputs: FxHashSet<DepNode<DefId>> =
dirty_inputs.iter() dirty_inputs.iter()
.filter_map(|d| retraced.map(d)) .filter_map(|d| retraced.map(d))
.collect(); .collect();
@ -84,7 +84,7 @@ pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
pub struct DirtyCleanVisitor<'a, 'tcx:'a> { pub struct DirtyCleanVisitor<'a, 'tcx:'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>,
query: &'a DepGraphQuery<DefId>, query: &'a DepGraphQuery<DefId>,
dirty_inputs: FnvHashSet<DepNode<DefId>>, dirty_inputs: FxHashSet<DepNode<DefId>>,
} }
impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> {
@ -187,8 +187,8 @@ impl<'a, 'tcx> Visitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> {
} }
pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
prev_metadata_hashes: &FnvHashMap<DefId, Fingerprint>, prev_metadata_hashes: &FxHashMap<DefId, Fingerprint>,
current_metadata_hashes: &FnvHashMap<DefId, Fingerprint>) { current_metadata_hashes: &FxHashMap<DefId, Fingerprint>) {
if !tcx.sess.opts.debugging_opts.query_dep_graph { if !tcx.sess.opts.debugging_opts.query_dep_graph {
return; return;
} }
@ -205,8 +205,8 @@ pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> { pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> {
tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>,
prev_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>, prev_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>,
current_metadata_hashes: &'m FnvHashMap<DefId, Fingerprint>, current_metadata_hashes: &'m FxHashMap<DefId, Fingerprint>,
} }
impl<'a, 'tcx, 'm> Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> { impl<'a, 'tcx, 'm> Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> {

View file

@ -120,7 +120,7 @@ use rustc::session::Session;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc::util::fs as fs_util; use rustc::util::fs as fs_util;
use rustc_data_structures::flock; use rustc_data_structures::flock;
use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap}; use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use std::ffi::OsString; use std::ffi::OsString;
use std::fs as std_fs; use std::fs as std_fs;
@ -195,7 +195,7 @@ pub fn prepare_session_directory(tcx: TyCtxt) -> Result<bool, ()> {
debug!("crate-dir: {}", crate_dir.display()); debug!("crate-dir: {}", crate_dir.display());
try!(create_dir(tcx.sess, &crate_dir, "crate")); try!(create_dir(tcx.sess, &crate_dir, "crate"));
let mut source_directories_already_tried = FnvHashSet(); let mut source_directories_already_tried = FxHashSet();
loop { loop {
// Generate a session directory of the form: // Generate a session directory of the form:
@ -490,7 +490,7 @@ fn delete_session_dir_lock_file(sess: &Session,
/// Find the most recent published session directory that is not in the /// Find the most recent published session directory that is not in the
/// ignore-list. /// ignore-list.
fn find_source_directory(crate_dir: &Path, fn find_source_directory(crate_dir: &Path,
source_directories_already_tried: &FnvHashSet<PathBuf>) source_directories_already_tried: &FxHashSet<PathBuf>)
-> Option<PathBuf> { -> Option<PathBuf> {
let iter = crate_dir.read_dir() let iter = crate_dir.read_dir()
.unwrap() // FIXME .unwrap() // FIXME
@ -500,7 +500,7 @@ fn find_source_directory(crate_dir: &Path,
} }
fn find_source_directory_in_iter<I>(iter: I, fn find_source_directory_in_iter<I>(iter: I,
source_directories_already_tried: &FnvHashSet<PathBuf>) source_directories_already_tried: &FxHashSet<PathBuf>)
-> Option<PathBuf> -> Option<PathBuf>
where I: Iterator<Item=PathBuf> where I: Iterator<Item=PathBuf>
{ {
@ -704,8 +704,8 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
// First do a pass over the crate directory, collecting lock files and // First do a pass over the crate directory, collecting lock files and
// session directories // session directories
let mut session_directories = FnvHashSet(); let mut session_directories = FxHashSet();
let mut lock_files = FnvHashSet(); let mut lock_files = FxHashSet();
for dir_entry in try!(crate_directory.read_dir()) { for dir_entry in try!(crate_directory.read_dir()) {
let dir_entry = match dir_entry { let dir_entry = match dir_entry {
@ -731,7 +731,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
} }
// Now map from lock files to session directories // Now map from lock files to session directories
let lock_file_to_session_dir: FnvHashMap<String, Option<String>> = let lock_file_to_session_dir: FxHashMap<String, Option<String>> =
lock_files.into_iter() lock_files.into_iter()
.map(|lock_file_name| { .map(|lock_file_name| {
assert!(lock_file_name.ends_with(LOCK_FILE_EXT)); assert!(lock_file_name.ends_with(LOCK_FILE_EXT));
@ -774,7 +774,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
} }
// Filter out `None` directories // Filter out `None` directories
let lock_file_to_session_dir: FnvHashMap<String, String> = let lock_file_to_session_dir: FxHashMap<String, String> =
lock_file_to_session_dir.into_iter() lock_file_to_session_dir.into_iter()
.filter_map(|(lock_file_name, directory_name)| { .filter_map(|(lock_file_name, directory_name)| {
directory_name.map(|n| (lock_file_name, n)) directory_name.map(|n| (lock_file_name, n))
@ -898,7 +898,7 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
} }
fn all_except_most_recent(deletion_candidates: Vec<(SystemTime, PathBuf, Option<flock::Lock>)>) fn all_except_most_recent(deletion_candidates: Vec<(SystemTime, PathBuf, Option<flock::Lock>)>)
-> FnvHashMap<PathBuf, Option<flock::Lock>> { -> FxHashMap<PathBuf, Option<flock::Lock>> {
let most_recent = deletion_candidates.iter() let most_recent = deletion_candidates.iter()
.map(|&(timestamp, ..)| timestamp) .map(|&(timestamp, ..)| timestamp)
.max(); .max();
@ -909,7 +909,7 @@ fn all_except_most_recent(deletion_candidates: Vec<(SystemTime, PathBuf, Option<
.map(|(_, path, lock)| (path, lock)) .map(|(_, path, lock)| (path, lock))
.collect() .collect()
} else { } else {
FnvHashMap() FxHashMap()
} }
} }
@ -946,19 +946,19 @@ fn test_all_except_most_recent() {
(UNIX_EPOCH + Duration::new(5, 0), PathBuf::from("5"), None), (UNIX_EPOCH + Duration::new(5, 0), PathBuf::from("5"), None),
(UNIX_EPOCH + Duration::new(3, 0), PathBuf::from("3"), None), (UNIX_EPOCH + Duration::new(3, 0), PathBuf::from("3"), None),
(UNIX_EPOCH + Duration::new(2, 0), PathBuf::from("2"), None), (UNIX_EPOCH + Duration::new(2, 0), PathBuf::from("2"), None),
]).keys().cloned().collect::<FnvHashSet<PathBuf>>(), ]).keys().cloned().collect::<FxHashSet<PathBuf>>(),
vec![ vec![
PathBuf::from("1"), PathBuf::from("1"),
PathBuf::from("2"), PathBuf::from("2"),
PathBuf::from("3"), PathBuf::from("3"),
PathBuf::from("4"), PathBuf::from("4"),
].into_iter().collect::<FnvHashSet<PathBuf>>() ].into_iter().collect::<FxHashSet<PathBuf>>()
); );
assert_eq!(all_except_most_recent( assert_eq!(all_except_most_recent(
vec![ vec![
]).keys().cloned().collect::<FnvHashSet<PathBuf>>(), ]).keys().cloned().collect::<FxHashSet<PathBuf>>(),
FnvHashSet() FxHashSet()
); );
} }
@ -973,7 +973,7 @@ fn test_timestamp_serialization() {
#[test] #[test]
fn test_find_source_directory_in_iter() { fn test_find_source_directory_in_iter() {
let already_visited = FnvHashSet(); let already_visited = FxHashSet();
// Find newest // Find newest
assert_eq!(find_source_directory_in_iter( assert_eq!(find_source_directory_in_iter(

View file

@ -12,7 +12,7 @@ use rustc::dep_graph::DepNode;
use rustc::hir::def_id::{CrateNum, DefId}; use rustc::hir::def_id::{CrateNum, DefId};
use rustc::hir::svh::Svh; use rustc::hir::svh::Svh;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::flock; use rustc_data_structures::flock;
use rustc_serialize::Decodable; use rustc_serialize::Decodable;
use rustc_serialize::opaque::Decoder; use rustc_serialize::opaque::Decoder;
@ -26,8 +26,8 @@ use super::file_format;
pub struct HashContext<'a, 'tcx: 'a> { pub struct HashContext<'a, 'tcx: 'a> {
pub tcx: TyCtxt<'a, 'tcx, 'tcx>, pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
incremental_hashes_map: &'a IncrementalHashesMap, incremental_hashes_map: &'a IncrementalHashesMap,
item_metadata_hashes: FnvHashMap<DefId, Fingerprint>, item_metadata_hashes: FxHashMap<DefId, Fingerprint>,
crate_hashes: FnvHashMap<CrateNum, Svh>, crate_hashes: FxHashMap<CrateNum, Svh>,
} }
impl<'a, 'tcx> HashContext<'a, 'tcx> { impl<'a, 'tcx> HashContext<'a, 'tcx> {
@ -37,8 +37,8 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> {
HashContext { HashContext {
tcx: tcx, tcx: tcx,
incremental_hashes_map: incremental_hashes_map, incremental_hashes_map: incremental_hashes_map,
item_metadata_hashes: FnvHashMap(), item_metadata_hashes: FxHashMap(),
crate_hashes: FnvHashMap(), crate_hashes: FxHashMap(),
} }
} }

View file

@ -15,7 +15,7 @@ use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh; use rustc::hir::svh::Svh;
use rustc::session::Session; use rustc::session::Session;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap}; use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_serialize::Decodable as RustcDecodable; use rustc_serialize::Decodable as RustcDecodable;
use rustc_serialize::opaque::Decoder; use rustc_serialize::opaque::Decoder;
use std::fs; use std::fs;
@ -30,7 +30,7 @@ use super::hash::*;
use super::fs::*; use super::fs::*;
use super::file_format; use super::file_format;
pub type DirtyNodes = FnvHashSet<DepNode<DefPathIndex>>; pub type DirtyNodes = FxHashSet<DepNode<DefPathIndex>>;
/// If we are in incremental mode, and a previous dep-graph exists, /// If we are in incremental mode, and a previous dep-graph exists,
/// then load up those nodes/edges that are still valid into the /// then load up those nodes/edges that are still valid into the
@ -183,7 +183,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Compute which work-products have an input that has changed or // Compute which work-products have an input that has changed or
// been removed. Put the dirty ones into a set. // been removed. Put the dirty ones into a set.
let mut dirty_target_nodes = FnvHashSet(); let mut dirty_target_nodes = FxHashSet();
for &(raw_source_node, ref target_node) in &retraced_edges { for &(raw_source_node, ref target_node) in &retraced_edges {
if dirty_raw_source_nodes.contains(raw_source_node) { if dirty_raw_source_nodes.contains(raw_source_node) {
if !dirty_target_nodes.contains(target_node) { if !dirty_target_nodes.contains(target_node) {
@ -239,7 +239,7 @@ fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
retraced: &RetracedDefIdDirectory) retraced: &RetracedDefIdDirectory)
-> DirtyNodes { -> DirtyNodes {
let mut hcx = HashContext::new(tcx, incremental_hashes_map); let mut hcx = HashContext::new(tcx, incremental_hashes_map);
let mut dirty_nodes = FnvHashSet(); let mut dirty_nodes = FxHashSet();
for hash in serialized_hashes { for hash in serialized_hashes {
if let Some(dep_node) = retraced.map(&hash.dep_node) { if let Some(dep_node) = retraced.map(&hash.dep_node) {
@ -270,7 +270,7 @@ fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
/// otherwise no longer applicable. /// otherwise no longer applicable.
fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn reconcile_work_products<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
work_products: Vec<SerializedWorkProduct>, work_products: Vec<SerializedWorkProduct>,
dirty_target_nodes: &FnvHashSet<DepNode<DefId>>) { dirty_target_nodes: &FxHashSet<DepNode<DefId>>) {
debug!("reconcile_work_products({:?})", work_products); debug!("reconcile_work_products({:?})", work_products);
for swp in work_products { for swp in work_products {
if dirty_target_nodes.contains(&DepNode::WorkProduct(swp.id.clone())) { if dirty_target_nodes.contains(&DepNode::WorkProduct(swp.id.clone())) {
@ -314,7 +314,7 @@ fn delete_dirty_work_product(tcx: TyCtxt,
fn load_prev_metadata_hashes(tcx: TyCtxt, fn load_prev_metadata_hashes(tcx: TyCtxt,
retraced: &RetracedDefIdDirectory, retraced: &RetracedDefIdDirectory,
output: &mut FnvHashMap<DefId, Fingerprint>) { output: &mut FxHashMap<DefId, Fingerprint>) {
if !tcx.sess.opts.debugging_opts.query_dep_graph { if !tcx.sess.opts.debugging_opts.query_dep_graph {
return return
} }

View file

@ -10,7 +10,7 @@
use rustc::dep_graph::{DepGraphQuery, DepNode}; use rustc::dep_graph::{DepGraphQuery, DepNode};
use rustc::hir::def_id::DefId; use rustc::hir::def_id::DefId;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::graph::{DepthFirstTraversal, INCOMING, NodeIndex}; use rustc_data_structures::graph::{DepthFirstTraversal, INCOMING, NodeIndex};
use super::hash::*; use super::hash::*;
@ -23,11 +23,11 @@ pub struct Predecessors<'query> {
// nodes. // nodes.
// - Values: transitive predecessors of the key that are hashable // - Values: transitive predecessors of the key that are hashable
// (e.g., HIR nodes, input meta-data nodes) // (e.g., HIR nodes, input meta-data nodes)
pub inputs: FnvHashMap<&'query DepNode<DefId>, Vec<&'query DepNode<DefId>>>, pub inputs: FxHashMap<&'query DepNode<DefId>, Vec<&'query DepNode<DefId>>>,
// - Keys: some hashable node // - Keys: some hashable node
// - Values: the hash thereof // - Values: the hash thereof
pub hashes: FnvHashMap<&'query DepNode<DefId>, Fingerprint>, pub hashes: FxHashMap<&'query DepNode<DefId>, Fingerprint>,
} }
impl<'q> Predecessors<'q> { impl<'q> Predecessors<'q> {
@ -37,7 +37,7 @@ impl<'q> Predecessors<'q> {
let all_nodes = query.graph.all_nodes(); let all_nodes = query.graph.all_nodes();
let tcx = hcx.tcx; let tcx = hcx.tcx;
let inputs: FnvHashMap<_, _> = all_nodes.iter() let inputs: FxHashMap<_, _> = all_nodes.iter()
.enumerate() .enumerate()
.filter(|&(_, node)| match node.data { .filter(|&(_, node)| match node.data {
DepNode::WorkProduct(_) => true, DepNode::WorkProduct(_) => true,
@ -60,7 +60,7 @@ impl<'q> Predecessors<'q> {
}) })
.collect(); .collect();
let mut hashes = FnvHashMap(); let mut hashes = FxHashMap();
for input in inputs.values().flat_map(|v| v.iter().cloned()) { for input in inputs.values().flat_map(|v| v.iter().cloned()) {
hashes.entry(input) hashes.entry(input)
.or_insert_with(|| hcx.hash(input).unwrap()); .or_insert_with(|| hcx.hash(input).unwrap());

View file

@ -13,7 +13,7 @@ use rustc::hir::def_id::DefId;
use rustc::hir::svh::Svh; use rustc::hir::svh::Svh;
use rustc::session::Session; use rustc::session::Session;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_serialize::Encodable as RustcEncodable; use rustc_serialize::Encodable as RustcEncodable;
use rustc_serialize::opaque::Encoder; use rustc_serialize::opaque::Encoder;
use std::hash::Hash; use std::hash::Hash;
@ -46,7 +46,7 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let query = tcx.dep_graph.query(); let query = tcx.dep_graph.query();
let mut hcx = HashContext::new(tcx, incremental_hashes_map); let mut hcx = HashContext::new(tcx, incremental_hashes_map);
let preds = Predecessors::new(&query, &mut hcx); let preds = Predecessors::new(&query, &mut hcx);
let mut current_metadata_hashes = FnvHashMap(); let mut current_metadata_hashes = FxHashMap();
// IMPORTANT: We are saving the metadata hashes *before* the dep-graph, // IMPORTANT: We are saving the metadata hashes *before* the dep-graph,
// since metadata-encoding might add new entries to the // since metadata-encoding might add new entries to the
@ -186,7 +186,7 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
svh: Svh, svh: Svh,
preds: &Predecessors, preds: &Predecessors,
builder: &mut DefIdDirectoryBuilder, builder: &mut DefIdDirectoryBuilder,
current_metadata_hashes: &mut FnvHashMap<DefId, Fingerprint>, current_metadata_hashes: &mut FxHashMap<DefId, Fingerprint>,
encoder: &mut Encoder) encoder: &mut Encoder)
-> io::Result<()> { -> io::Result<()> {
// For each `MetaData(X)` node where `X` is local, accumulate a // For each `MetaData(X)` node where `X` is local, accumulate a
@ -198,10 +198,10 @@ pub fn encode_metadata_hashes(tcx: TyCtxt,
// (I initially wrote this with an iterator, but it seemed harder to read.) // (I initially wrote this with an iterator, but it seemed harder to read.)
let mut serialized_hashes = SerializedMetadataHashes { let mut serialized_hashes = SerializedMetadataHashes {
hashes: vec![], hashes: vec![],
index_map: FnvHashMap() index_map: FxHashMap()
}; };
let mut def_id_hashes = FnvHashMap(); let mut def_id_hashes = FxHashMap();
for (&target, sources) in &preds.inputs { for (&target, sources) in &preds.inputs {
let def_id = match *target { let def_id = match *target {

View file

@ -18,7 +18,7 @@ use rustc::traits::Reveal;
use middle::const_val::ConstVal; use middle::const_val::ConstVal;
use rustc_const_eval::eval_const_expr_partial; use rustc_const_eval::eval_const_expr_partial;
use rustc_const_eval::EvalHint::ExprTypeChecked; use rustc_const_eval::EvalHint::ExprTypeChecked;
use util::nodemap::FnvHashSet; use util::nodemap::FxHashSet;
use lint::{LateContext, LintContext, LintArray}; use lint::{LateContext, LintContext, LintArray};
use lint::{LintPass, LateLintPass}; use lint::{LintPass, LateLintPass};
@ -428,7 +428,7 @@ fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
/// Check if the given type is "ffi-safe" (has a stable, well-defined /// Check if the given type is "ffi-safe" (has a stable, well-defined
/// representation which can be exported to C code). /// representation which can be exported to C code).
fn check_type_for_ffi(&self, cache: &mut FnvHashSet<Ty<'tcx>>, ty: Ty<'tcx>) -> FfiResult { fn check_type_for_ffi(&self, cache: &mut FxHashSet<Ty<'tcx>>, ty: Ty<'tcx>) -> FfiResult {
use self::FfiResult::*; use self::FfiResult::*;
let cx = self.cx.tcx; let cx = self.cx.tcx;
@ -639,7 +639,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
// any generic types right now: // any generic types right now:
let ty = self.cx.tcx.normalize_associated_type(&ty); let ty = self.cx.tcx.normalize_associated_type(&ty);
match self.check_type_for_ffi(&mut FnvHashSet(), ty) { match self.check_type_for_ffi(&mut FxHashSet(), ty) {
FfiResult::FfiSafe => {} FfiResult::FfiSafe => {}
FfiResult::FfiUnsafe(s) => { FfiResult::FfiUnsafe(s) => {
self.cx.span_lint(IMPROPER_CTYPES, sp, s); self.cx.span_lint(IMPROPER_CTYPES, sp, s);

View file

@ -11,7 +11,7 @@
use rustc::hir::pat_util; use rustc::hir::pat_util;
use rustc::ty; use rustc::ty;
use rustc::ty::adjustment; use rustc::ty::adjustment;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use lint::{LateContext, EarlyContext, LintContext, LintArray}; use lint::{LateContext, EarlyContext, LintContext, LintArray};
use lint::{LintPass, EarlyLintPass, LateLintPass}; use lint::{LintPass, EarlyLintPass, LateLintPass};
@ -42,7 +42,7 @@ impl UnusedMut {
// collect all mutable pattern and group their NodeIDs by their Identifier to // collect all mutable pattern and group their NodeIDs by their Identifier to
// avoid false warnings in match arms with multiple patterns // avoid false warnings in match arms with multiple patterns
let mut mutables = FnvHashMap(); let mut mutables = FxHashMap();
for p in pats { for p in pats {
pat_util::pat_bindings(p, |mode, id, _, path1| { pat_util::pat_bindings(p, |mode, id, _, path1| {
let name = path1.node; let name = path1.node;

View file

@ -22,7 +22,7 @@ use rustc_back::PanicStrategy;
use rustc::session::search_paths::PathKind; use rustc::session::search_paths::PathKind;
use rustc::middle; use rustc::middle;
use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate}; use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate};
use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc::hir::map::Definitions; use rustc::hir::map::Definitions;
use std::cell::{RefCell, Cell}; use std::cell::{RefCell, Cell};
@ -50,7 +50,7 @@ pub struct CrateLoader<'a> {
pub sess: &'a Session, pub sess: &'a Session,
cstore: &'a CStore, cstore: &'a CStore,
next_crate_num: CrateNum, next_crate_num: CrateNum,
foreign_item_map: FnvHashMap<String, Vec<ast::NodeId>>, foreign_item_map: FxHashMap<String, Vec<ast::NodeId>>,
local_crate_name: String, local_crate_name: String,
} }
@ -148,7 +148,7 @@ impl<'a> CrateLoader<'a> {
sess: sess, sess: sess,
cstore: cstore, cstore: cstore,
next_crate_num: cstore.next_crate_num(), next_crate_num: cstore.next_crate_num(),
foreign_item_map: FnvHashMap(), foreign_item_map: FxHashMap(),
local_crate_name: local_crate_name.to_owned(), local_crate_name: local_crate_name.to_owned(),
} }
} }
@ -401,7 +401,7 @@ impl<'a> CrateLoader<'a> {
fn update_extern_crate(&mut self, fn update_extern_crate(&mut self,
cnum: CrateNum, cnum: CrateNum,
mut extern_crate: ExternCrate, mut extern_crate: ExternCrate,
visited: &mut FnvHashSet<(CrateNum, bool)>) visited: &mut FxHashSet<(CrateNum, bool)>)
{ {
if !visited.insert((cnum, extern_crate.direct)) { return } if !visited.insert((cnum, extern_crate.direct)) { return }
@ -442,7 +442,7 @@ impl<'a> CrateLoader<'a> {
// The map from crate numbers in the crate we're resolving to local crate // The map from crate numbers in the crate we're resolving to local crate
// numbers // numbers
let deps = crate_root.crate_deps.decode(metadata); let deps = crate_root.crate_deps.decode(metadata);
let map: FnvHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| { let map: FxHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| {
debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash);
let (local_cnum, ..) = self.resolve_crate(root, let (local_cnum, ..) = self.resolve_crate(root,
&dep.name.as_str(), &dep.name.as_str(),
@ -1021,7 +1021,7 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> {
let extern_crate = let extern_crate =
ExternCrate { def_id: def_id, span: item.span, direct: true, path_len: len }; ExternCrate { def_id: def_id, span: item.span, direct: true, path_len: len };
self.update_extern_crate(cnum, extern_crate, &mut FnvHashSet()); self.update_extern_crate(cnum, extern_crate, &mut FxHashSet());
self.cstore.add_extern_mod_stmt_cnum(info.id, cnum); self.cstore.add_extern_mod_stmt_cnum(info.id, cnum);
loaded_macros loaded_macros

View file

@ -21,7 +21,7 @@ use rustc::hir::svh::Svh;
use rustc::middle::cstore::ExternCrate; use rustc::middle::cstore::ExternCrate;
use rustc_back::PanicStrategy; use rustc_back::PanicStrategy;
use rustc_data_structures::indexed_vec::IndexVec; use rustc_data_structures::indexed_vec::IndexVec;
use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap}; use rustc::util::nodemap::{FxHashMap, NodeMap, NodeSet, DefIdMap};
use std::cell::{RefCell, Cell}; use std::cell::{RefCell, Cell};
use std::rc::Rc; use std::rc::Rc;
@ -76,7 +76,7 @@ pub struct CrateMetadata {
/// hashmap, which gives the reverse mapping. This allows us to /// hashmap, which gives the reverse mapping. This allows us to
/// quickly retrace a `DefPath`, which is needed for incremental /// quickly retrace a `DefPath`, which is needed for incremental
/// compilation support. /// compilation support.
pub key_map: FnvHashMap<DefKey, DefIndex>, pub key_map: FxHashMap<DefKey, DefIndex>,
/// Flag if this crate is required by an rlib version of this crate, or in /// Flag if this crate is required by an rlib version of this crate, or in
/// other words whether it was explicitly linked to. An example of a crate /// other words whether it was explicitly linked to. An example of a crate
@ -94,7 +94,7 @@ pub struct CachedInlinedItem {
pub struct CStore { pub struct CStore {
pub dep_graph: DepGraph, pub dep_graph: DepGraph,
metas: RefCell<FnvHashMap<CrateNum, Rc<CrateMetadata>>>, metas: RefCell<FxHashMap<CrateNum, Rc<CrateMetadata>>>,
/// Map from NodeId's of local extern crate statements to crate numbers /// Map from NodeId's of local extern crate statements to crate numbers
extern_mod_crate_map: RefCell<NodeMap<CrateNum>>, extern_mod_crate_map: RefCell<NodeMap<CrateNum>>,
used_crate_sources: RefCell<Vec<CrateSource>>, used_crate_sources: RefCell<Vec<CrateSource>>,
@ -110,15 +110,15 @@ impl CStore {
pub fn new(dep_graph: &DepGraph) -> CStore { pub fn new(dep_graph: &DepGraph) -> CStore {
CStore { CStore {
dep_graph: dep_graph.clone(), dep_graph: dep_graph.clone(),
metas: RefCell::new(FnvHashMap()), metas: RefCell::new(FxHashMap()),
extern_mod_crate_map: RefCell::new(FnvHashMap()), extern_mod_crate_map: RefCell::new(FxHashMap()),
used_crate_sources: RefCell::new(Vec::new()), used_crate_sources: RefCell::new(Vec::new()),
used_libraries: RefCell::new(Vec::new()), used_libraries: RefCell::new(Vec::new()),
used_link_args: RefCell::new(Vec::new()), used_link_args: RefCell::new(Vec::new()),
statically_included_foreign_items: RefCell::new(NodeSet()), statically_included_foreign_items: RefCell::new(NodeSet()),
visible_parent_map: RefCell::new(FnvHashMap()), visible_parent_map: RefCell::new(FxHashMap()),
inlined_item_cache: RefCell::new(FnvHashMap()), inlined_item_cache: RefCell::new(FxHashMap()),
defid_for_inlined_node: RefCell::new(FnvHashMap()), defid_for_inlined_node: RefCell::new(FxHashMap()),
} }
} }

View file

@ -17,7 +17,7 @@ use schema::*;
use rustc::hir::map as hir_map; use rustc::hir::map as hir_map;
use rustc::hir::map::{DefKey, DefPathData}; use rustc::hir::map::{DefKey, DefPathData};
use rustc::util::nodemap::FnvHashMap; use rustc::util::nodemap::FxHashMap;
use rustc::hir; use rustc::hir;
use rustc::hir::intravisit::IdRange; use rustc::hir::intravisit::IdRange;
@ -432,7 +432,7 @@ impl<'a, 'tcx> MetadataBlob {
/// Go through each item in the metadata and create a map from that /// Go through each item in the metadata and create a map from that
/// item's def-key to the item's DefIndex. /// item's def-key to the item's DefIndex.
pub fn load_key_map(&self, index: LazySeq<Index>) -> FnvHashMap<DefKey, DefIndex> { pub fn load_key_map(&self, index: LazySeq<Index>) -> FxHashMap<DefKey, DefIndex> {
index.iter_enumerated(self.raw_bytes()) index.iter_enumerated(self.raw_bytes())
.map(|(index, item)| (item.decode(self).def_key.decode(self), index)) .map(|(index, item)| (item.decode(self).def_key.decode(self), index))
.collect() .collect()

View file

@ -23,7 +23,7 @@ use rustc::traits::specialization_graph;
use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::{self, Ty, TyCtxt};
use rustc::session::config::{self, CrateTypeProcMacro}; use rustc::session::config::{self, CrateTypeProcMacro};
use rustc::util::nodemap::{FnvHashMap, NodeSet}; use rustc::util::nodemap::{FxHashMap, NodeSet};
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque}; use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
use std::hash::Hash; use std::hash::Hash;
@ -52,8 +52,8 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
reachable: &'a NodeSet, reachable: &'a NodeSet,
lazy_state: LazyState, lazy_state: LazyState,
type_shorthands: FnvHashMap<Ty<'tcx>, usize>, type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FnvHashMap<ty::Predicate<'tcx>, usize>, predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
} }
macro_rules! encoder_methods { macro_rules! encoder_methods {
@ -200,7 +200,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
variant: &U, variant: &U,
map: M) map: M)
-> Result<(), <Self as Encoder>::Error> -> Result<(), <Self as Encoder>::Error>
where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap<T, usize>, where M: for<'b> Fn(&'b mut Self) -> &'b mut FxHashMap<T, usize>,
T: Clone + Eq + Hash, T: Clone + Eq + Hash,
U: Encodable U: Encodable
{ {
@ -1143,7 +1143,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
struct ImplVisitor<'a, 'tcx: 'a> { struct ImplVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>,
impls: FnvHashMap<DefId, Vec<DefIndex>>, impls: FxHashMap<DefId, Vec<DefIndex>>,
} }
impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> {
@ -1165,7 +1165,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
fn encode_impls(&mut self) -> LazySeq<TraitImpls> { fn encode_impls(&mut self) -> LazySeq<TraitImpls> {
let mut visitor = ImplVisitor { let mut visitor = ImplVisitor {
tcx: self.tcx, tcx: self.tcx,
impls: FnvHashMap(), impls: FxHashMap(),
}; };
self.tcx.map.krate().visit_all_items(&mut visitor); self.tcx.map.krate().visit_all_items(&mut visitor);

View file

@ -221,7 +221,7 @@ use rustc::session::Session;
use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch}; use rustc::session::filesearch::{FileSearch, FileMatches, FileDoesntMatch};
use rustc::session::search_paths::PathKind; use rustc::session::search_paths::PathKind;
use rustc::util::common; use rustc::util::common;
use rustc::util::nodemap::FnvHashMap; use rustc::util::nodemap::FxHashMap;
use rustc_llvm as llvm; use rustc_llvm as llvm;
use rustc_llvm::{False, ObjectFile, mk_section_iter}; use rustc_llvm::{False, ObjectFile, mk_section_iter};
@ -430,7 +430,7 @@ impl<'a> Context<'a> {
let rlib_prefix = format!("lib{}", self.crate_name); let rlib_prefix = format!("lib{}", self.crate_name);
let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name); let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name);
let mut candidates = FnvHashMap(); let mut candidates = FxHashMap();
let mut staticlibs = vec![]; let mut staticlibs = vec![];
// First, find all possible candidate rlibs and dylibs purely based on // First, find all possible candidate rlibs and dylibs purely based on
@ -469,7 +469,7 @@ impl<'a> Context<'a> {
let hash_str = hash.to_string(); let hash_str = hash.to_string();
let slot = candidates.entry(hash_str) let slot = candidates.entry(hash_str)
.or_insert_with(|| (FnvHashMap(), FnvHashMap())); .or_insert_with(|| (FxHashMap(), FxHashMap()));
let (ref mut rlibs, ref mut dylibs) = *slot; let (ref mut rlibs, ref mut dylibs) = *slot;
fs::canonicalize(path) fs::canonicalize(path)
.map(|p| { .map(|p| {
@ -492,7 +492,7 @@ impl<'a> Context<'a> {
// A Library candidate is created if the metadata for the set of // A Library candidate is created if the metadata for the set of
// libraries corresponds to the crate id and hash criteria that this // libraries corresponds to the crate id and hash criteria that this
// search is being performed for. // search is being performed for.
let mut libraries = FnvHashMap(); let mut libraries = FxHashMap();
for (_hash, (rlibs, dylibs)) in candidates { for (_hash, (rlibs, dylibs)) in candidates {
let mut slot = None; let mut slot = None;
let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot);
@ -544,7 +544,7 @@ impl<'a> Context<'a> {
// be read, it is assumed that the file isn't a valid rust library (no // be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted). // errors are emitted).
fn extract_one(&mut self, fn extract_one(&mut self,
m: FnvHashMap<PathBuf, PathKind>, m: FxHashMap<PathBuf, PathKind>,
flavor: CrateFlavor, flavor: CrateFlavor,
slot: &mut Option<(Svh, MetadataBlob)>) slot: &mut Option<(Svh, MetadataBlob)>)
-> Option<(PathBuf, PathKind)> { -> Option<(PathBuf, PathKind)> {
@ -690,8 +690,8 @@ impl<'a> Context<'a> {
// rlibs/dylibs. // rlibs/dylibs.
let sess = self.sess; let sess = self.sess;
let dylibname = self.dylibname(); let dylibname = self.dylibname();
let mut rlibs = FnvHashMap(); let mut rlibs = FxHashMap();
let mut dylibs = FnvHashMap(); let mut dylibs = FxHashMap();
{ {
let locs = locs.map(|l| PathBuf::from(l)).filter(|loc| { let locs = locs.map(|l| PathBuf::from(l)).filter(|loc| {
if !loc.exists() { if !loc.exists() {

View file

@ -13,7 +13,7 @@
use std; use std;
use rustc_const_math::{ConstMathErr, Op}; use rustc_const_math::{ConstMathErr, Op};
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::indexed_vec::Idx;
use build::{BlockAnd, BlockAndExtension, Builder}; use build::{BlockAnd, BlockAndExtension, Builder};
@ -190,7 +190,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// first process the set of fields that were provided // first process the set of fields that were provided
// (evaluating them in order given by user) // (evaluating them in order given by user)
let fields_map: FnvHashMap<_, _> = let fields_map: FxHashMap<_, _> =
fields.into_iter() fields.into_iter()
.map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr)))) .map(|f| (f.name, unpack!(block = this.as_operand(block, f.expr))))
.collect(); .collect();

View file

@ -14,7 +14,7 @@
//! details. //! details.
use build::{BlockAnd, BlockAndExtension, Builder}; use build::{BlockAnd, BlockAndExtension, Builder};
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::bitvec::BitVector;
use rustc::middle::const_val::ConstVal; use rustc::middle::const_val::ConstVal;
use rustc::ty::{AdtDef, Ty}; use rustc::ty::{AdtDef, Ty};
@ -309,7 +309,7 @@ enum TestKind<'tcx> {
SwitchInt { SwitchInt {
switch_ty: Ty<'tcx>, switch_ty: Ty<'tcx>,
options: Vec<ConstVal>, options: Vec<ConstVal>,
indices: FnvHashMap<ConstVal, usize>, indices: FxHashMap<ConstVal, usize>,
}, },
// test for equality // test for equality

View file

@ -18,7 +18,7 @@
use build::Builder; use build::Builder;
use build::matches::{Candidate, MatchPair, Test, TestKind}; use build::matches::{Candidate, MatchPair, Test, TestKind};
use hair::*; use hair::*;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::bitvec::BitVector;
use rustc::middle::const_val::ConstVal; use rustc::middle::const_val::ConstVal;
use rustc::ty::{self, Ty}; use rustc::ty::{self, Ty};
@ -54,7 +54,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
// these maps are empty to start; cases are // these maps are empty to start; cases are
// added below in add_cases_to_switch // added below in add_cases_to_switch
options: vec![], options: vec![],
indices: FnvHashMap(), indices: FxHashMap(),
} }
} }
} }
@ -110,7 +110,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
candidate: &Candidate<'pat, 'tcx>, candidate: &Candidate<'pat, 'tcx>,
switch_ty: Ty<'tcx>, switch_ty: Ty<'tcx>,
options: &mut Vec<ConstVal>, options: &mut Vec<ConstVal>,
indices: &mut FnvHashMap<ConstVal, usize>) indices: &mut FxHashMap<ConstVal, usize>)
-> bool -> bool
{ {
let match_pair = match candidate.match_pairs.iter().find(|mp| mp.lvalue == *test_lvalue) { let match_pair = match candidate.match_pairs.iter().find(|mp| mp.lvalue == *test_lvalue) {

View file

@ -94,7 +94,7 @@ use rustc::ty::{Ty, TyCtxt};
use rustc::mir::*; use rustc::mir::*;
use syntax_pos::Span; use syntax_pos::Span;
use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::indexed_vec::Idx;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
pub struct Scope<'tcx> { pub struct Scope<'tcx> {
/// the scope-id within the scope_auxiliary /// the scope-id within the scope_auxiliary
@ -140,7 +140,7 @@ pub struct Scope<'tcx> {
free: Option<FreeData<'tcx>>, free: Option<FreeData<'tcx>>,
/// The cache for drop chain on “normal” exit into a particular BasicBlock. /// The cache for drop chain on “normal” exit into a particular BasicBlock.
cached_exits: FnvHashMap<(BasicBlock, CodeExtent), BasicBlock>, cached_exits: FxHashMap<(BasicBlock, CodeExtent), BasicBlock>,
} }
struct DropData<'tcx> { struct DropData<'tcx> {
@ -298,7 +298,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
needs_cleanup: false, needs_cleanup: false,
drops: vec![], drops: vec![],
free: None, free: None,
cached_exits: FnvHashMap() cached_exits: FxHashMap()
}); });
self.scope_auxiliary.push(ScopeAuxiliary { self.scope_auxiliary.push(ScopeAuxiliary {
extent: extent, extent: extent,

View file

@ -14,7 +14,7 @@ use rustc::hir::def_id::DefId;
use rustc::mir::*; use rustc::mir::*;
use rustc::mir::transform::MirSource; use rustc::mir::transform::MirSource;
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::{Idx}; use rustc_data_structures::indexed_vec::{Idx};
use std::fmt::Display; use std::fmt::Display;
use std::fs; use std::fs;
@ -122,10 +122,10 @@ enum Annotation {
} }
fn scope_entry_exit_annotations(auxiliary: Option<&ScopeAuxiliaryVec>) fn scope_entry_exit_annotations(auxiliary: Option<&ScopeAuxiliaryVec>)
-> FnvHashMap<Location, Vec<Annotation>> -> FxHashMap<Location, Vec<Annotation>>
{ {
// compute scope/entry exit annotations // compute scope/entry exit annotations
let mut annotations = FnvHashMap(); let mut annotations = FxHashMap();
if let Some(auxiliary) = auxiliary { if let Some(auxiliary) = auxiliary {
for (scope_id, auxiliary) in auxiliary.iter_enumerated() { for (scope_id, auxiliary) in auxiliary.iter_enumerated() {
annotations.entry(auxiliary.dom) annotations.entry(auxiliary.dom)
@ -166,7 +166,7 @@ fn write_basic_block(tcx: TyCtxt,
block: BasicBlock, block: BasicBlock,
mir: &Mir, mir: &Mir,
w: &mut Write, w: &mut Write,
annotations: &FnvHashMap<Location, Vec<Annotation>>) annotations: &FxHashMap<Location, Vec<Annotation>>)
-> io::Result<()> { -> io::Result<()> {
let data = &mir[block]; let data = &mir[block];
@ -217,7 +217,7 @@ fn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String {
/// Returns the total number of variables printed. /// Returns the total number of variables printed.
fn write_scope_tree(tcx: TyCtxt, fn write_scope_tree(tcx: TyCtxt,
mir: &Mir, mir: &Mir,
scope_tree: &FnvHashMap<VisibilityScope, Vec<VisibilityScope>>, scope_tree: &FxHashMap<VisibilityScope, Vec<VisibilityScope>>,
w: &mut Write, w: &mut Write,
parent: VisibilityScope, parent: VisibilityScope,
depth: usize) depth: usize)
@ -283,7 +283,7 @@ fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
writeln!(w, " {{")?; writeln!(w, " {{")?;
// construct a scope tree and write it out // construct a scope tree and write it out
let mut scope_tree: FnvHashMap<VisibilityScope, Vec<VisibilityScope>> = FnvHashMap(); let mut scope_tree: FxHashMap<VisibilityScope, Vec<VisibilityScope>> = FxHashMap();
for (index, scope_data) in mir.visibility_scopes.iter().enumerate() { for (index, scope_data) in mir.visibility_scopes.iter().enumerate() {
if let Some(parent) = scope_data.parent_scope { if let Some(parent) = scope_data.parent_scope {
scope_tree.entry(parent) scope_tree.entry(parent)

View file

@ -14,7 +14,7 @@ use rustc::mir::{Location, Lvalue, Mir, Operand, ProjectionElem, Rvalue, Local};
use rustc::mir::transform::{MirPass, MirSource, Pass}; use rustc::mir::transform::{MirPass, MirSource, Pass};
use rustc::mir::visit::{MutVisitor, Visitor}; use rustc::mir::visit::{MutVisitor, Visitor};
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc::util::nodemap::FnvHashSet; use rustc::util::nodemap::FxHashSet;
use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::indexed_vec::Idx;
use std::mem; use std::mem;
@ -107,5 +107,5 @@ impl<'b, 'a, 'tcx> Visitor<'tcx> for OptimizationFinder<'b, 'a, 'tcx> {
#[derive(Default)] #[derive(Default)]
struct OptimizationList { struct OptimizationList {
and_stars: FnvHashSet<Location>, and_stars: FxHashSet<Location>,
} }

View file

@ -15,7 +15,7 @@
use rustc::hir; use rustc::hir;
use rustc::hir::intravisit as hir_visit; use rustc::hir::intravisit as hir_visit;
use rustc::util::common::to_readable_str; use rustc::util::common::to_readable_str;
use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; use rustc::util::nodemap::{FxHashMap, FxHashSet};
use syntax::ast::{self, NodeId, AttrId}; use syntax::ast::{self, NodeId, AttrId};
use syntax::visit as ast_visit; use syntax::visit as ast_visit;
use syntax_pos::Span; use syntax_pos::Span;
@ -34,15 +34,15 @@ struct NodeData {
struct StatCollector<'k> { struct StatCollector<'k> {
krate: Option<&'k hir::Crate>, krate: Option<&'k hir::Crate>,
data: FnvHashMap<&'static str, NodeData>, data: FxHashMap<&'static str, NodeData>,
seen: FnvHashSet<Id>, seen: FxHashSet<Id>,
} }
pub fn print_hir_stats(krate: &hir::Crate) { pub fn print_hir_stats(krate: &hir::Crate) {
let mut collector = StatCollector { let mut collector = StatCollector {
krate: Some(krate), krate: Some(krate),
data: FnvHashMap(), data: FxHashMap(),
seen: FnvHashSet(), seen: FxHashSet(),
}; };
hir_visit::walk_crate(&mut collector, krate); hir_visit::walk_crate(&mut collector, krate);
collector.print("HIR STATS"); collector.print("HIR STATS");
@ -51,8 +51,8 @@ pub fn print_hir_stats(krate: &hir::Crate) {
pub fn print_ast_stats(krate: &ast::Crate, title: &str) { pub fn print_ast_stats(krate: &ast::Crate, title: &str) {
let mut collector = StatCollector { let mut collector = StatCollector {
krate: None, krate: None,
data: FnvHashMap(), data: FxHashMap(),
seen: FnvHashSet(), seen: FxHashSet(),
}; };
ast_visit::walk_crate(&mut collector, krate); ast_visit::walk_crate(&mut collector, krate);
collector.print(title); collector.print(title);

View file

@ -25,7 +25,7 @@ use rustc::middle::cstore::LoadedMacros;
use rustc::hir::def::*; use rustc::hir::def::*;
use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::ty; use rustc::ty;
use rustc::util::nodemap::FnvHashMap; use rustc::util::nodemap::FxHashMap;
use std::cell::Cell; use std::cell::Cell;
use std::rc::Rc; use std::rc::Rc;
@ -539,7 +539,7 @@ impl<'b> Resolver<'b> {
self.invocations.insert(mark, invocation); self.invocations.insert(mark, invocation);
} }
let mut macros: FnvHashMap<_, _> = macros.into_iter().map(|mut def| { let mut macros: FxHashMap<_, _> = macros.into_iter().map(|mut def| {
def.body = mark_tts(&def.body, mark); def.body = mark_tts(&def.body, mark);
let ext = macro_rules::compile(&self.session.parse_sess, &def); let ext = macro_rules::compile(&self.session.parse_sess, &def);
(def.ident.name, (def, Rc::new(ext))) (def.ident.name, (def, Rc::new(ext)))

View file

@ -51,7 +51,7 @@ use rustc::hir::def::*;
use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId}; use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId};
use rustc::ty; use rustc::ty;
use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap}; use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet}; use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet};
use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ext::hygiene::{Mark, SyntaxContext};
use syntax::ast::{self, FloatTy}; use syntax::ast::{self, FloatTy};
@ -498,7 +498,7 @@ struct BindingInfo {
} }
// Map from the name in a pattern to its binding mode. // Map from the name in a pattern to its binding mode.
type BindingMap = FnvHashMap<Ident, BindingInfo>; type BindingMap = FxHashMap<Ident, BindingInfo>;
#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum PatternSource { enum PatternSource {
@ -703,14 +703,14 @@ enum ModulePrefixResult<'a> {
/// One local scope. /// One local scope.
#[derive(Debug)] #[derive(Debug)]
struct Rib<'a> { struct Rib<'a> {
bindings: FnvHashMap<Ident, Def>, bindings: FxHashMap<Ident, Def>,
kind: RibKind<'a>, kind: RibKind<'a>,
} }
impl<'a> Rib<'a> { impl<'a> Rib<'a> {
fn new(kind: RibKind<'a>) -> Rib<'a> { fn new(kind: RibKind<'a>) -> Rib<'a> {
Rib { Rib {
bindings: FnvHashMap(), bindings: FxHashMap(),
kind: kind, kind: kind,
} }
} }
@ -769,7 +769,7 @@ pub struct ModuleS<'a> {
// is the NodeId of the local `extern crate` item (otherwise, `extern_crate_id` is None). // is the NodeId of the local `extern crate` item (otherwise, `extern_crate_id` is None).
extern_crate_id: Option<NodeId>, extern_crate_id: Option<NodeId>,
resolutions: RefCell<FnvHashMap<(Name, Namespace), &'a RefCell<NameResolution<'a>>>>, resolutions: RefCell<FxHashMap<(Name, Namespace), &'a RefCell<NameResolution<'a>>>>,
no_implicit_prelude: bool, no_implicit_prelude: bool,
@ -794,7 +794,7 @@ impl<'a> ModuleS<'a> {
kind: kind, kind: kind,
normal_ancestor_id: None, normal_ancestor_id: None,
extern_crate_id: None, extern_crate_id: None,
resolutions: RefCell::new(FnvHashMap()), resolutions: RefCell::new(FxHashMap()),
no_implicit_prelude: false, no_implicit_prelude: false,
glob_importers: RefCell::new(Vec::new()), glob_importers: RefCell::new(Vec::new()),
globs: RefCell::new((Vec::new())), globs: RefCell::new((Vec::new())),
@ -950,12 +950,12 @@ impl<'a> NameBinding<'a> {
/// Interns the names of the primitive types. /// Interns the names of the primitive types.
struct PrimitiveTypeTable { struct PrimitiveTypeTable {
primitive_types: FnvHashMap<Name, PrimTy>, primitive_types: FxHashMap<Name, PrimTy>,
} }
impl PrimitiveTypeTable { impl PrimitiveTypeTable {
fn new() -> PrimitiveTypeTable { fn new() -> PrimitiveTypeTable {
let mut table = PrimitiveTypeTable { primitive_types: FnvHashMap() }; let mut table = PrimitiveTypeTable { primitive_types: FxHashMap() };
table.intern("bool", TyBool); table.intern("bool", TyBool);
table.intern("char", TyChar); table.intern("char", TyChar);
@ -989,17 +989,17 @@ pub struct Resolver<'a> {
// Maps the node id of a statement to the expansions of the `macro_rules!`s // Maps the node id of a statement to the expansions of the `macro_rules!`s
// immediately above the statement (if appropriate). // immediately above the statement (if appropriate).
macros_at_scope: FnvHashMap<NodeId, Vec<Mark>>, macros_at_scope: FxHashMap<NodeId, Vec<Mark>>,
graph_root: Module<'a>, graph_root: Module<'a>,
prelude: Option<Module<'a>>, prelude: Option<Module<'a>>,
trait_item_map: FnvHashMap<(Name, DefId), bool /* is static method? */>, trait_item_map: FxHashMap<(Name, DefId), bool /* is static method? */>,
// Names of fields of an item `DefId` accessible with dot syntax. // Names of fields of an item `DefId` accessible with dot syntax.
// Used for hints during error reporting. // Used for hints during error reporting.
field_names: FnvHashMap<DefId, Vec<Name>>, field_names: FxHashMap<DefId, Vec<Name>>,
// All imports known to succeed or fail. // All imports known to succeed or fail.
determined_imports: Vec<&'a ImportDirective<'a>>, determined_imports: Vec<&'a ImportDirective<'a>>,
@ -1061,8 +1061,8 @@ pub struct Resolver<'a> {
// all imports, but only glob imports are actually interesting). // all imports, but only glob imports are actually interesting).
pub glob_map: GlobMap, pub glob_map: GlobMap,
used_imports: FnvHashSet<(NodeId, Namespace)>, used_imports: FxHashSet<(NodeId, Namespace)>,
used_crates: FnvHashSet<CrateNum>, used_crates: FxHashSet<CrateNum>,
pub maybe_unused_trait_imports: NodeSet, pub maybe_unused_trait_imports: NodeSet,
privacy_errors: Vec<PrivacyError<'a>>, privacy_errors: Vec<PrivacyError<'a>>,
@ -1075,12 +1075,12 @@ pub struct Resolver<'a> {
pub exported_macros: Vec<ast::MacroDef>, pub exported_macros: Vec<ast::MacroDef>,
crate_loader: &'a mut CrateLoader, crate_loader: &'a mut CrateLoader,
macro_names: FnvHashSet<Name>, macro_names: FxHashSet<Name>,
builtin_macros: FnvHashMap<Name, Rc<SyntaxExtension>>, builtin_macros: FxHashMap<Name, Rc<SyntaxExtension>>,
lexical_macro_resolutions: Vec<(Name, LegacyScope<'a>)>, lexical_macro_resolutions: Vec<(Name, LegacyScope<'a>)>,
// Maps the `Mark` of an expansion to its containing module or block. // Maps the `Mark` of an expansion to its containing module or block.
invocations: FnvHashMap<Mark, &'a InvocationData<'a>>, invocations: FxHashMap<Mark, &'a InvocationData<'a>>,
} }
pub struct ResolverArenas<'a> { pub struct ResolverArenas<'a> {
@ -1206,7 +1206,7 @@ impl<'a> Resolver<'a> {
let mut definitions = Definitions::new(); let mut definitions = Definitions::new();
DefCollector::new(&mut definitions).collect_root(); DefCollector::new(&mut definitions).collect_root();
let mut invocations = FnvHashMap(); let mut invocations = FxHashMap();
invocations.insert(Mark::root(), invocations.insert(Mark::root(),
arenas.alloc_invocation_data(InvocationData::root(graph_root))); arenas.alloc_invocation_data(InvocationData::root(graph_root)));
@ -1214,15 +1214,15 @@ impl<'a> Resolver<'a> {
session: session, session: session,
definitions: definitions, definitions: definitions,
macros_at_scope: FnvHashMap(), macros_at_scope: FxHashMap(),
// The outermost module has def ID 0; this is not reflected in the // The outermost module has def ID 0; this is not reflected in the
// AST. // AST.
graph_root: graph_root, graph_root: graph_root,
prelude: None, prelude: None,
trait_item_map: FnvHashMap(), trait_item_map: FxHashMap(),
field_names: FnvHashMap(), field_names: FxHashMap(),
determined_imports: Vec::new(), determined_imports: Vec::new(),
indeterminate_imports: Vec::new(), indeterminate_imports: Vec::new(),
@ -1248,8 +1248,8 @@ impl<'a> Resolver<'a> {
make_glob_map: make_glob_map == MakeGlobMap::Yes, make_glob_map: make_glob_map == MakeGlobMap::Yes,
glob_map: NodeMap(), glob_map: NodeMap(),
used_imports: FnvHashSet(), used_imports: FxHashSet(),
used_crates: FnvHashSet(), used_crates: FxHashSet(),
maybe_unused_trait_imports: NodeSet(), maybe_unused_trait_imports: NodeSet(),
privacy_errors: Vec::new(), privacy_errors: Vec::new(),
@ -1266,8 +1266,8 @@ impl<'a> Resolver<'a> {
exported_macros: Vec::new(), exported_macros: Vec::new(),
crate_loader: crate_loader, crate_loader: crate_loader,
macro_names: FnvHashSet(), macro_names: FxHashSet(),
builtin_macros: FnvHashMap(), builtin_macros: FxHashMap(),
lexical_macro_resolutions: Vec::new(), lexical_macro_resolutions: Vec::new(),
invocations: invocations, invocations: invocations,
} }
@ -1340,7 +1340,7 @@ impl<'a> Resolver<'a> {
fn add_to_glob_map(&mut self, id: NodeId, name: Name) { fn add_to_glob_map(&mut self, id: NodeId, name: Name) {
if self.make_glob_map { if self.make_glob_map {
self.glob_map.entry(id).or_insert_with(FnvHashSet).insert(name); self.glob_map.entry(id).or_insert_with(FxHashSet).insert(name);
} }
} }
@ -1803,7 +1803,7 @@ impl<'a> Resolver<'a> {
match type_parameters { match type_parameters {
HasTypeParameters(generics, rib_kind) => { HasTypeParameters(generics, rib_kind) => {
let mut function_type_rib = Rib::new(rib_kind); let mut function_type_rib = Rib::new(rib_kind);
let mut seen_bindings = FnvHashMap(); let mut seen_bindings = FxHashMap();
for type_parameter in &generics.ty_params { for type_parameter in &generics.ty_params {
let name = type_parameter.ident.name; let name = type_parameter.ident.name;
debug!("with_type_parameter_rib: {}", type_parameter.id); debug!("with_type_parameter_rib: {}", type_parameter.id);
@ -1867,7 +1867,7 @@ impl<'a> Resolver<'a> {
self.label_ribs.push(Rib::new(rib_kind)); self.label_ribs.push(Rib::new(rib_kind));
// Add each argument to the rib. // Add each argument to the rib.
let mut bindings_list = FnvHashMap(); let mut bindings_list = FxHashMap();
for argument in &declaration.inputs { for argument in &declaration.inputs {
self.resolve_pattern(&argument.pat, PatternSource::FnParam, &mut bindings_list); self.resolve_pattern(&argument.pat, PatternSource::FnParam, &mut bindings_list);
@ -2069,7 +2069,7 @@ impl<'a> Resolver<'a> {
walk_list!(self, visit_expr, &local.init); walk_list!(self, visit_expr, &local.init);
// Resolve the pattern. // Resolve the pattern.
self.resolve_pattern(&local.pat, PatternSource::Let, &mut FnvHashMap()); self.resolve_pattern(&local.pat, PatternSource::Let, &mut FxHashMap());
} }
// build a map from pattern identifiers to binding-info's. // build a map from pattern identifiers to binding-info's.
@ -2077,7 +2077,7 @@ impl<'a> Resolver<'a> {
// that expands into an or-pattern where one 'x' was from the // that expands into an or-pattern where one 'x' was from the
// user and one 'x' came from the macro. // user and one 'x' came from the macro.
fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap { fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap {
let mut binding_map = FnvHashMap(); let mut binding_map = FxHashMap();
pat.walk(&mut |pat| { pat.walk(&mut |pat| {
if let PatKind::Ident(binding_mode, ident, ref sub_pat) = pat.node { if let PatKind::Ident(binding_mode, ident, ref sub_pat) = pat.node {
@ -2137,7 +2137,7 @@ impl<'a> Resolver<'a> {
fn resolve_arm(&mut self, arm: &Arm) { fn resolve_arm(&mut self, arm: &Arm) {
self.value_ribs.push(Rib::new(NormalRibKind)); self.value_ribs.push(Rib::new(NormalRibKind));
let mut bindings_list = FnvHashMap(); let mut bindings_list = FxHashMap();
for pattern in &arm.pats { for pattern in &arm.pats {
self.resolve_pattern(&pattern, PatternSource::Match, &mut bindings_list); self.resolve_pattern(&pattern, PatternSource::Match, &mut bindings_list);
} }
@ -2278,7 +2278,7 @@ impl<'a> Resolver<'a> {
pat_id: NodeId, pat_id: NodeId,
outer_pat_id: NodeId, outer_pat_id: NodeId,
pat_src: PatternSource, pat_src: PatternSource,
bindings: &mut FnvHashMap<Ident, NodeId>) bindings: &mut FxHashMap<Ident, NodeId>)
-> PathResolution { -> PathResolution {
// Add the binding to the local ribs, if it // Add the binding to the local ribs, if it
// doesn't already exist in the bindings map. (We // doesn't already exist in the bindings map. (We
@ -2391,7 +2391,7 @@ impl<'a> Resolver<'a> {
pat_src: PatternSource, pat_src: PatternSource,
// Maps idents to the node ID for the // Maps idents to the node ID for the
// outermost pattern that binds them. // outermost pattern that binds them.
bindings: &mut FnvHashMap<Ident, NodeId>) { bindings: &mut FxHashMap<Ident, NodeId>) {
// Visit all direct subpatterns of this pattern. // Visit all direct subpatterns of this pattern.
let outer_pat_id = pat.id; let outer_pat_id = pat.id;
pat.walk(&mut |pat| { pat.walk(&mut |pat| {
@ -3048,7 +3048,7 @@ impl<'a> Resolver<'a> {
self.visit_expr(subexpression); self.visit_expr(subexpression);
self.value_ribs.push(Rib::new(NormalRibKind)); self.value_ribs.push(Rib::new(NormalRibKind));
self.resolve_pattern(pattern, PatternSource::IfLet, &mut FnvHashMap()); self.resolve_pattern(pattern, PatternSource::IfLet, &mut FxHashMap());
self.visit_block(if_block); self.visit_block(if_block);
self.value_ribs.pop(); self.value_ribs.pop();
@ -3065,7 +3065,7 @@ impl<'a> Resolver<'a> {
ExprKind::WhileLet(ref pattern, ref subexpression, ref block, label) => { ExprKind::WhileLet(ref pattern, ref subexpression, ref block, label) => {
self.visit_expr(subexpression); self.visit_expr(subexpression);
self.value_ribs.push(Rib::new(NormalRibKind)); self.value_ribs.push(Rib::new(NormalRibKind));
self.resolve_pattern(pattern, PatternSource::WhileLet, &mut FnvHashMap()); self.resolve_pattern(pattern, PatternSource::WhileLet, &mut FxHashMap());
self.resolve_labeled_block(label, expr.id, block); self.resolve_labeled_block(label, expr.id, block);
@ -3075,7 +3075,7 @@ impl<'a> Resolver<'a> {
ExprKind::ForLoop(ref pattern, ref subexpression, ref block, label) => { ExprKind::ForLoop(ref pattern, ref subexpression, ref block, label) => {
self.visit_expr(subexpression); self.visit_expr(subexpression);
self.value_ribs.push(Rib::new(NormalRibKind)); self.value_ribs.push(Rib::new(NormalRibKind));
self.resolve_pattern(pattern, PatternSource::For, &mut FnvHashMap()); self.resolve_pattern(pattern, PatternSource::For, &mut FxHashMap());
self.resolve_labeled_block(label, expr.id, block); self.resolve_labeled_block(label, expr.id, block);
@ -3337,7 +3337,7 @@ impl<'a> Resolver<'a> {
fn report_errors(&mut self) { fn report_errors(&mut self) {
self.report_shadowing_errors(); self.report_shadowing_errors();
let mut reported_spans = FnvHashSet(); let mut reported_spans = FxHashSet();
for &AmbiguityError { span, name, b1, b2 } in &self.ambiguity_errors { for &AmbiguityError { span, name, b1, b2 } in &self.ambiguity_errors {
if !reported_spans.insert(span) { continue } if !reported_spans.insert(span) { continue }
@ -3369,7 +3369,7 @@ impl<'a> Resolver<'a> {
self.resolve_macro_name(scope, name); self.resolve_macro_name(scope, name);
} }
let mut reported_errors = FnvHashSet(); let mut reported_errors = FxHashSet();
for binding in replace(&mut self.disallowed_shadowing, Vec::new()) { for binding in replace(&mut self.disallowed_shadowing, Vec::new()) {
if self.resolve_macro_name(binding.parent, binding.name).is_some() && if self.resolve_macro_name(binding.parent, binding.name).is_some() &&
reported_errors.insert((binding.name, binding.span)) { reported_errors.insert((binding.name, binding.span)) {

View file

@ -79,7 +79,7 @@ use type_::Type;
use type_of; use type_of;
use value::Value; use value::Value;
use Disr; use Disr;
use util::nodemap::{NodeSet, FnvHashMap, FnvHashSet}; use util::nodemap::{NodeSet, FxHashMap, FxHashSet};
use arena::TypedArena; use arena::TypedArena;
use libc::c_uint; use libc::c_uint;
@ -1318,7 +1318,7 @@ fn write_metadata(cx: &SharedCrateContext,
fn internalize_symbols<'a, 'tcx>(sess: &Session, fn internalize_symbols<'a, 'tcx>(sess: &Session,
ccxs: &CrateContextList<'a, 'tcx>, ccxs: &CrateContextList<'a, 'tcx>,
symbol_map: &SymbolMap<'tcx>, symbol_map: &SymbolMap<'tcx>,
reachable: &FnvHashSet<&str>) { reachable: &FxHashSet<&str>) {
let scx = ccxs.shared(); let scx = ccxs.shared();
let tcx = scx.tcx(); let tcx = scx.tcx();
@ -1332,7 +1332,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
// 'unsafe' because we are holding on to CStr's from the LLVM module within // 'unsafe' because we are holding on to CStr's from the LLVM module within
// this block. // this block.
unsafe { unsafe {
let mut referenced_somewhere = FnvHashSet(); let mut referenced_somewhere = FxHashSet();
// Collect all symbols that need to stay externally visible because they // Collect all symbols that need to stay externally visible because they
// are referenced via a declaration in some other codegen unit. // are referenced via a declaration in some other codegen unit.
@ -1353,7 +1353,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
// Also collect all symbols for which we cannot adjust linkage, because // Also collect all symbols for which we cannot adjust linkage, because
// it is fixed by some directive in the source code (e.g. #[no_mangle]). // it is fixed by some directive in the source code (e.g. #[no_mangle]).
let linkage_fixed_explicitly: FnvHashSet<_> = scx let linkage_fixed_explicitly: FxHashSet<_> = scx
.translation_items() .translation_items()
.borrow() .borrow()
.iter() .iter()
@ -1862,7 +1862,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
} }
if scx.sess().opts.debugging_opts.print_trans_items.is_some() { if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
let mut item_to_cgus = FnvHashMap(); let mut item_to_cgus = FxHashMap();
for cgu in &codegen_units { for cgu in &codegen_units {
for (&trans_item, &linkage) in cgu.items() { for (&trans_item, &linkage) in cgu.items() {

View file

@ -19,7 +19,7 @@ use common::*;
use machine::llalign_of_pref; use machine::llalign_of_pref;
use type_::Type; use type_::Type;
use value::Value; use value::Value;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use libc::{c_uint, c_char}; use libc::{c_uint, c_char};
use std::borrow::Cow; use std::borrow::Cow;
@ -62,7 +62,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// Build version of path with cycles removed. // Build version of path with cycles removed.
// Pass 1: scan table mapping str -> rightmost pos. // Pass 1: scan table mapping str -> rightmost pos.
let mut mm = FnvHashMap(); let mut mm = FxHashMap();
let len = v.len(); let len = v.len();
let mut i = 0; let mut i = 0;
while i < len { while i < len {

View file

@ -211,7 +211,7 @@ use context::SharedCrateContext;
use common::{fulfill_obligation, type_is_sized}; use common::{fulfill_obligation, type_is_sized};
use glue::{self, DropGlueKind}; use glue::{self, DropGlueKind};
use monomorphize::{self, Instance}; use monomorphize::{self, Instance};
use util::nodemap::{FnvHashSet, FnvHashMap, DefIdMap}; use util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
use trans_item::{TransItem, type_to_string, def_id_to_string}; use trans_item::{TransItem, type_to_string, def_id_to_string};
@ -228,7 +228,7 @@ pub struct InliningMap<'tcx> {
// that are potentially inlined by LLVM into the source. // that are potentially inlined by LLVM into the source.
// The two numbers in the tuple are the start (inclusive) and // The two numbers in the tuple are the start (inclusive) and
// end index (exclusive) within the `targets` vecs. // end index (exclusive) within the `targets` vecs.
index: FnvHashMap<TransItem<'tcx>, (usize, usize)>, index: FxHashMap<TransItem<'tcx>, (usize, usize)>,
targets: Vec<TransItem<'tcx>>, targets: Vec<TransItem<'tcx>>,
} }
@ -236,7 +236,7 @@ impl<'tcx> InliningMap<'tcx> {
fn new() -> InliningMap<'tcx> { fn new() -> InliningMap<'tcx> {
InliningMap { InliningMap {
index: FnvHashMap(), index: FxHashMap(),
targets: Vec::new(), targets: Vec::new(),
} }
} }
@ -269,7 +269,7 @@ impl<'tcx> InliningMap<'tcx> {
pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
mode: TransItemCollectionMode) mode: TransItemCollectionMode)
-> (FnvHashSet<TransItem<'tcx>>, -> (FxHashSet<TransItem<'tcx>>,
InliningMap<'tcx>) { InliningMap<'tcx>) {
// We are not tracking dependencies of this pass as it has to be re-executed // We are not tracking dependencies of this pass as it has to be re-executed
// every time no matter what. // every time no matter what.
@ -277,7 +277,7 @@ pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 't
let roots = collect_roots(scx, mode); let roots = collect_roots(scx, mode);
debug!("Building translation item graph, beginning at roots"); debug!("Building translation item graph, beginning at roots");
let mut visited = FnvHashSet(); let mut visited = FxHashSet();
let mut recursion_depths = DefIdMap(); let mut recursion_depths = DefIdMap();
let mut inlining_map = InliningMap::new(); let mut inlining_map = InliningMap::new();
@ -318,7 +318,7 @@ fn collect_roots<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
// Collect all monomorphized translation items reachable from `starting_point` // Collect all monomorphized translation items reachable from `starting_point`
fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>, fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>,
starting_point: TransItem<'tcx>, starting_point: TransItem<'tcx>,
visited: &mut FnvHashSet<TransItem<'tcx>>, visited: &mut FxHashSet<TransItem<'tcx>>,
recursion_depths: &mut DefIdMap<usize>, recursion_depths: &mut DefIdMap<usize>,
inlining_map: &mut InliningMap<'tcx>) { inlining_map: &mut InliningMap<'tcx>) {
if !visited.insert(starting_point.clone()) { if !visited.insert(starting_point.clone()) {
@ -1179,9 +1179,9 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(scx: &SharedCrateContext<'a, '
if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) { if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) {
let callee_substs = tcx.erase_regions(&trait_ref.substs); let callee_substs = tcx.erase_regions(&trait_ref.substs);
let overridden_methods: FnvHashSet<_> = items.iter() let overridden_methods: FxHashSet<_> = items.iter()
.map(|item| item.name) .map(|item| item.name)
.collect(); .collect();
for method in tcx.provided_trait_methods(trait_ref.def_id) { for method in tcx.provided_trait_methods(trait_ref.def_id) {
if overridden_methods.contains(&method.name) { if overridden_methods.contains(&method.name) {
continue; continue;

View file

@ -32,7 +32,7 @@ use session::config::NoDebugInfo;
use session::Session; use session::Session;
use session::config; use session::config;
use symbol_map::SymbolMap; use symbol_map::SymbolMap;
use util::nodemap::{NodeSet, DefIdMap, FnvHashMap, FnvHashSet}; use util::nodemap::{NodeSet, DefIdMap, FxHashMap, FxHashSet};
use std::ffi::{CStr, CString}; use std::ffi::{CStr, CString};
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
@ -52,7 +52,7 @@ pub struct Stats {
pub n_inlines: Cell<usize>, pub n_inlines: Cell<usize>,
pub n_closures: Cell<usize>, pub n_closures: Cell<usize>,
pub n_llvm_insns: Cell<usize>, pub n_llvm_insns: Cell<usize>,
pub llvm_insns: RefCell<FnvHashMap<String, usize>>, pub llvm_insns: RefCell<FxHashMap<String, usize>>,
// (ident, llvm-instructions) // (ident, llvm-instructions)
pub fn_stats: RefCell<Vec<(String, usize)> >, pub fn_stats: RefCell<Vec<(String, usize)> >,
} }
@ -74,7 +74,7 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
use_dll_storage_attrs: bool, use_dll_storage_attrs: bool,
translation_items: RefCell<FnvHashSet<TransItem<'tcx>>>, translation_items: RefCell<FxHashSet<TransItem<'tcx>>>,
trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>, trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>,
project_cache: RefCell<DepTrackingMap<ProjectionCache<'tcx>>>, project_cache: RefCell<DepTrackingMap<ProjectionCache<'tcx>>>,
} }
@ -89,15 +89,15 @@ pub struct LocalCrateContext<'tcx> {
previous_work_product: Option<WorkProduct>, previous_work_product: Option<WorkProduct>,
tn: TypeNames, // FIXME: This seems to be largely unused. tn: TypeNames, // FIXME: This seems to be largely unused.
codegen_unit: CodegenUnit<'tcx>, codegen_unit: CodegenUnit<'tcx>,
needs_unwind_cleanup_cache: RefCell<FnvHashMap<Ty<'tcx>, bool>>, needs_unwind_cleanup_cache: RefCell<FxHashMap<Ty<'tcx>, bool>>,
fn_pointer_shims: RefCell<FnvHashMap<Ty<'tcx>, ValueRef>>, fn_pointer_shims: RefCell<FxHashMap<Ty<'tcx>, ValueRef>>,
drop_glues: RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>>, drop_glues: RefCell<FxHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>>,
/// Cache instances of monomorphic and polymorphic items /// Cache instances of monomorphic and polymorphic items
instances: RefCell<FnvHashMap<Instance<'tcx>, ValueRef>>, instances: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
/// Cache generated vtables /// Cache generated vtables
vtables: RefCell<FnvHashMap<ty::PolyTraitRef<'tcx>, ValueRef>>, vtables: RefCell<FxHashMap<ty::PolyTraitRef<'tcx>, ValueRef>>,
/// Cache of constant strings, /// Cache of constant strings,
const_cstr_cache: RefCell<FnvHashMap<InternedString, ValueRef>>, const_cstr_cache: RefCell<FxHashMap<InternedString, ValueRef>>,
/// Reverse-direction for const ptrs cast from globals. /// Reverse-direction for const ptrs cast from globals.
/// Key is a ValueRef holding a *T, /// Key is a ValueRef holding a *T,
@ -107,24 +107,24 @@ pub struct LocalCrateContext<'tcx> {
/// when we ptrcast, and we have to ptrcast during translation /// when we ptrcast, and we have to ptrcast during translation
/// of a [T] const because we form a slice, a (*T,usize) pair, not /// of a [T] const because we form a slice, a (*T,usize) pair, not
/// a pointer to an LLVM array type. Similar for trait objects. /// a pointer to an LLVM array type. Similar for trait objects.
const_unsized: RefCell<FnvHashMap<ValueRef, ValueRef>>, const_unsized: RefCell<FxHashMap<ValueRef, ValueRef>>,
/// Cache of emitted const globals (value -> global) /// Cache of emitted const globals (value -> global)
const_globals: RefCell<FnvHashMap<ValueRef, ValueRef>>, const_globals: RefCell<FxHashMap<ValueRef, ValueRef>>,
/// Cache of emitted const values /// Cache of emitted const values
const_values: RefCell<FnvHashMap<(ast::NodeId, &'tcx Substs<'tcx>), ValueRef>>, const_values: RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>), ValueRef>>,
/// Cache of external const values /// Cache of external const values
extern_const_values: RefCell<DefIdMap<ValueRef>>, extern_const_values: RefCell<DefIdMap<ValueRef>>,
/// Mapping from static definitions to their DefId's. /// Mapping from static definitions to their DefId's.
statics: RefCell<FnvHashMap<ValueRef, DefId>>, statics: RefCell<FxHashMap<ValueRef, DefId>>,
impl_method_cache: RefCell<FnvHashMap<(DefId, ast::Name), DefId>>, impl_method_cache: RefCell<FxHashMap<(DefId, ast::Name), DefId>>,
/// Cache of closure wrappers for bare fn's. /// Cache of closure wrappers for bare fn's.
closure_bare_wrapper_cache: RefCell<FnvHashMap<ValueRef, ValueRef>>, closure_bare_wrapper_cache: RefCell<FxHashMap<ValueRef, ValueRef>>,
/// List of globals for static variables which need to be passed to the /// List of globals for static variables which need to be passed to the
/// LLVM function ReplaceAllUsesWith (RAUW) when translation is complete. /// LLVM function ReplaceAllUsesWith (RAUW) when translation is complete.
@ -132,15 +132,15 @@ pub struct LocalCrateContext<'tcx> {
/// to constants.) /// to constants.)
statics_to_rauw: RefCell<Vec<(ValueRef, ValueRef)>>, statics_to_rauw: RefCell<Vec<(ValueRef, ValueRef)>>,
lltypes: RefCell<FnvHashMap<Ty<'tcx>, Type>>, lltypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
llsizingtypes: RefCell<FnvHashMap<Ty<'tcx>, Type>>, llsizingtypes: RefCell<FxHashMap<Ty<'tcx>, Type>>,
type_hashcodes: RefCell<FnvHashMap<Ty<'tcx>, String>>, type_hashcodes: RefCell<FxHashMap<Ty<'tcx>, String>>,
int_type: Type, int_type: Type,
opaque_vec_type: Type, opaque_vec_type: Type,
builder: BuilderRef_res, builder: BuilderRef_res,
/// Holds the LLVM values for closure IDs. /// Holds the LLVM values for closure IDs.
closure_vals: RefCell<FnvHashMap<Instance<'tcx>, ValueRef>>, closure_vals: RefCell<FxHashMap<Instance<'tcx>, ValueRef>>,
dbg_cx: Option<debuginfo::CrateDebugContext<'tcx>>, dbg_cx: Option<debuginfo::CrateDebugContext<'tcx>>,
@ -148,7 +148,7 @@ pub struct LocalCrateContext<'tcx> {
eh_unwind_resume: Cell<Option<ValueRef>>, eh_unwind_resume: Cell<Option<ValueRef>>,
rust_try_fn: Cell<Option<ValueRef>>, rust_try_fn: Cell<Option<ValueRef>>,
intrinsics: RefCell<FnvHashMap<&'static str, ValueRef>>, intrinsics: RefCell<FxHashMap<&'static str, ValueRef>>,
/// Number of LLVM instructions translated into this `LocalCrateContext`. /// Number of LLVM instructions translated into this `LocalCrateContext`.
/// This is used to perform some basic load-balancing to keep all LLVM /// This is used to perform some basic load-balancing to keep all LLVM
@ -502,12 +502,12 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
n_inlines: Cell::new(0), n_inlines: Cell::new(0),
n_closures: Cell::new(0), n_closures: Cell::new(0),
n_llvm_insns: Cell::new(0), n_llvm_insns: Cell::new(0),
llvm_insns: RefCell::new(FnvHashMap()), llvm_insns: RefCell::new(FxHashMap()),
fn_stats: RefCell::new(Vec::new()), fn_stats: RefCell::new(Vec::new()),
}, },
check_overflow: check_overflow, check_overflow: check_overflow,
use_dll_storage_attrs: use_dll_storage_attrs, use_dll_storage_attrs: use_dll_storage_attrs,
translation_items: RefCell::new(FnvHashSet()), translation_items: RefCell::new(FxHashSet()),
trait_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())), trait_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
project_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())), project_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
} }
@ -557,7 +557,7 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
self.use_dll_storage_attrs self.use_dll_storage_attrs
} }
pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> { pub fn translation_items(&self) -> &RefCell<FxHashSet<TransItem<'tcx>>> {
&self.translation_items &self.translation_items
} }
@ -612,32 +612,32 @@ impl<'tcx> LocalCrateContext<'tcx> {
previous_work_product: previous_work_product, previous_work_product: previous_work_product,
codegen_unit: codegen_unit, codegen_unit: codegen_unit,
tn: TypeNames::new(), tn: TypeNames::new(),
needs_unwind_cleanup_cache: RefCell::new(FnvHashMap()), needs_unwind_cleanup_cache: RefCell::new(FxHashMap()),
fn_pointer_shims: RefCell::new(FnvHashMap()), fn_pointer_shims: RefCell::new(FxHashMap()),
drop_glues: RefCell::new(FnvHashMap()), drop_glues: RefCell::new(FxHashMap()),
instances: RefCell::new(FnvHashMap()), instances: RefCell::new(FxHashMap()),
vtables: RefCell::new(FnvHashMap()), vtables: RefCell::new(FxHashMap()),
const_cstr_cache: RefCell::new(FnvHashMap()), const_cstr_cache: RefCell::new(FxHashMap()),
const_unsized: RefCell::new(FnvHashMap()), const_unsized: RefCell::new(FxHashMap()),
const_globals: RefCell::new(FnvHashMap()), const_globals: RefCell::new(FxHashMap()),
const_values: RefCell::new(FnvHashMap()), const_values: RefCell::new(FxHashMap()),
extern_const_values: RefCell::new(DefIdMap()), extern_const_values: RefCell::new(DefIdMap()),
statics: RefCell::new(FnvHashMap()), statics: RefCell::new(FxHashMap()),
impl_method_cache: RefCell::new(FnvHashMap()), impl_method_cache: RefCell::new(FxHashMap()),
closure_bare_wrapper_cache: RefCell::new(FnvHashMap()), closure_bare_wrapper_cache: RefCell::new(FxHashMap()),
statics_to_rauw: RefCell::new(Vec::new()), statics_to_rauw: RefCell::new(Vec::new()),
lltypes: RefCell::new(FnvHashMap()), lltypes: RefCell::new(FxHashMap()),
llsizingtypes: RefCell::new(FnvHashMap()), llsizingtypes: RefCell::new(FxHashMap()),
type_hashcodes: RefCell::new(FnvHashMap()), type_hashcodes: RefCell::new(FxHashMap()),
int_type: Type::from_ref(ptr::null_mut()), int_type: Type::from_ref(ptr::null_mut()),
opaque_vec_type: Type::from_ref(ptr::null_mut()), opaque_vec_type: Type::from_ref(ptr::null_mut()),
builder: BuilderRef_res(llvm::LLVMCreateBuilderInContext(llcx)), builder: BuilderRef_res(llvm::LLVMCreateBuilderInContext(llcx)),
closure_vals: RefCell::new(FnvHashMap()), closure_vals: RefCell::new(FxHashMap()),
dbg_cx: dbg_cx, dbg_cx: dbg_cx,
eh_personality: Cell::new(None), eh_personality: Cell::new(None),
eh_unwind_resume: Cell::new(None), eh_unwind_resume: Cell::new(None),
rust_try_fn: Cell::new(None), rust_try_fn: Cell::new(None),
intrinsics: RefCell::new(FnvHashMap()), intrinsics: RefCell::new(FxHashMap()),
n_llvm_insns: Cell::new(0), n_llvm_insns: Cell::new(0),
type_of_depth: Cell::new(0), type_of_depth: Cell::new(0),
symbol_map: symbol_map, symbol_map: symbol_map,
@ -794,16 +794,16 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.shared.link_meta &self.shared.link_meta
} }
pub fn needs_unwind_cleanup_cache(&self) -> &RefCell<FnvHashMap<Ty<'tcx>, bool>> { pub fn needs_unwind_cleanup_cache(&self) -> &RefCell<FxHashMap<Ty<'tcx>, bool>> {
&self.local().needs_unwind_cleanup_cache &self.local().needs_unwind_cleanup_cache
} }
pub fn fn_pointer_shims(&self) -> &RefCell<FnvHashMap<Ty<'tcx>, ValueRef>> { pub fn fn_pointer_shims(&self) -> &RefCell<FxHashMap<Ty<'tcx>, ValueRef>> {
&self.local().fn_pointer_shims &self.local().fn_pointer_shims
} }
pub fn drop_glues<'a>(&'a self) pub fn drop_glues<'a>(&'a self)
-> &'a RefCell<FnvHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>> { -> &'a RefCell<FxHashMap<DropGlueKind<'tcx>, (ValueRef, FnType)>> {
&self.local().drop_glues &self.local().drop_glues
} }
@ -815,28 +815,28 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
self.sess().cstore.defid_for_inlined_node(node_id) self.sess().cstore.defid_for_inlined_node(node_id)
} }
pub fn instances<'a>(&'a self) -> &'a RefCell<FnvHashMap<Instance<'tcx>, ValueRef>> { pub fn instances<'a>(&'a self) -> &'a RefCell<FxHashMap<Instance<'tcx>, ValueRef>> {
&self.local().instances &self.local().instances
} }
pub fn vtables<'a>(&'a self) -> &'a RefCell<FnvHashMap<ty::PolyTraitRef<'tcx>, ValueRef>> { pub fn vtables<'a>(&'a self) -> &'a RefCell<FxHashMap<ty::PolyTraitRef<'tcx>, ValueRef>> {
&self.local().vtables &self.local().vtables
} }
pub fn const_cstr_cache<'a>(&'a self) -> &'a RefCell<FnvHashMap<InternedString, ValueRef>> { pub fn const_cstr_cache<'a>(&'a self) -> &'a RefCell<FxHashMap<InternedString, ValueRef>> {
&self.local().const_cstr_cache &self.local().const_cstr_cache
} }
pub fn const_unsized<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> { pub fn const_unsized<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>> {
&self.local().const_unsized &self.local().const_unsized
} }
pub fn const_globals<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> { pub fn const_globals<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>> {
&self.local().const_globals &self.local().const_globals
} }
pub fn const_values<'a>(&'a self) -> &'a RefCell<FnvHashMap<(ast::NodeId, &'tcx Substs<'tcx>), pub fn const_values<'a>(&'a self) -> &'a RefCell<FxHashMap<(ast::NodeId, &'tcx Substs<'tcx>),
ValueRef>> { ValueRef>> {
&self.local().const_values &self.local().const_values
} }
@ -844,16 +844,16 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.local().extern_const_values &self.local().extern_const_values
} }
pub fn statics<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, DefId>> { pub fn statics<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, DefId>> {
&self.local().statics &self.local().statics
} }
pub fn impl_method_cache<'a>(&'a self) pub fn impl_method_cache<'a>(&'a self)
-> &'a RefCell<FnvHashMap<(DefId, ast::Name), DefId>> { -> &'a RefCell<FxHashMap<(DefId, ast::Name), DefId>> {
&self.local().impl_method_cache &self.local().impl_method_cache
} }
pub fn closure_bare_wrapper_cache<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> { pub fn closure_bare_wrapper_cache<'a>(&'a self) -> &'a RefCell<FxHashMap<ValueRef, ValueRef>> {
&self.local().closure_bare_wrapper_cache &self.local().closure_bare_wrapper_cache
} }
@ -861,15 +861,15 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.local().statics_to_rauw &self.local().statics_to_rauw
} }
pub fn lltypes<'a>(&'a self) -> &'a RefCell<FnvHashMap<Ty<'tcx>, Type>> { pub fn lltypes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, Type>> {
&self.local().lltypes &self.local().lltypes
} }
pub fn llsizingtypes<'a>(&'a self) -> &'a RefCell<FnvHashMap<Ty<'tcx>, Type>> { pub fn llsizingtypes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, Type>> {
&self.local().llsizingtypes &self.local().llsizingtypes
} }
pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell<FnvHashMap<Ty<'tcx>, String>> { pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell<FxHashMap<Ty<'tcx>, String>> {
&self.local().type_hashcodes &self.local().type_hashcodes
} }
@ -885,7 +885,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
self.local().opaque_vec_type self.local().opaque_vec_type
} }
pub fn closure_vals<'a>(&'a self) -> &'a RefCell<FnvHashMap<Instance<'tcx>, ValueRef>> { pub fn closure_vals<'a>(&'a self) -> &'a RefCell<FxHashMap<Instance<'tcx>, ValueRef>> {
&self.local().closure_vals &self.local().closure_vals
} }
@ -905,7 +905,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.local().rust_try_fn &self.local().rust_try_fn
} }
fn intrinsics<'a>(&'a self) -> &'a RefCell<FnvHashMap<&'static str, ValueRef>> { fn intrinsics<'a>(&'a self) -> &'a RefCell<FxHashMap<&'static str, ValueRef>> {
&self.local().intrinsics &self.local().intrinsics
} }
@ -958,7 +958,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&*self.local().symbol_map &*self.local().symbol_map
} }
pub fn translation_items(&self) -> &RefCell<FnvHashSet<TransItem<'tcx>>> { pub fn translation_items(&self) -> &RefCell<FxHashSet<TransItem<'tcx>>> {
&self.shared.translation_items &self.shared.translation_items
} }

View file

@ -36,7 +36,7 @@ use common::CrateContext;
use type_::Type; use type_::Type;
use rustc::ty::{self, AdtKind, Ty, layout}; use rustc::ty::{self, AdtKind, Ty, layout};
use session::config; use session::config;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use util::common::path2cstr; use util::common::path2cstr;
use libc::{c_uint, c_longlong}; use libc::{c_uint, c_longlong};
@ -84,20 +84,20 @@ pub struct TypeMap<'tcx> {
// The UniqueTypeIds created so far // The UniqueTypeIds created so far
unique_id_interner: Interner, unique_id_interner: Interner,
// A map from UniqueTypeId to debuginfo metadata for that type. This is a 1:1 mapping. // A map from UniqueTypeId to debuginfo metadata for that type. This is a 1:1 mapping.
unique_id_to_metadata: FnvHashMap<UniqueTypeId, DIType>, unique_id_to_metadata: FxHashMap<UniqueTypeId, DIType>,
// A map from types to debuginfo metadata. This is a N:1 mapping. // A map from types to debuginfo metadata. This is a N:1 mapping.
type_to_metadata: FnvHashMap<Ty<'tcx>, DIType>, type_to_metadata: FxHashMap<Ty<'tcx>, DIType>,
// A map from types to UniqueTypeId. This is a N:1 mapping. // A map from types to UniqueTypeId. This is a N:1 mapping.
type_to_unique_id: FnvHashMap<Ty<'tcx>, UniqueTypeId> type_to_unique_id: FxHashMap<Ty<'tcx>, UniqueTypeId>
} }
impl<'tcx> TypeMap<'tcx> { impl<'tcx> TypeMap<'tcx> {
pub fn new() -> TypeMap<'tcx> { pub fn new() -> TypeMap<'tcx> {
TypeMap { TypeMap {
unique_id_interner: Interner::new(), unique_id_interner: Interner::new(),
type_to_metadata: FnvHashMap(), type_to_metadata: FxHashMap(),
unique_id_to_metadata: FnvHashMap(), unique_id_to_metadata: FxHashMap(),
type_to_unique_id: FnvHashMap(), type_to_unique_id: FxHashMap(),
} }
} }

View file

@ -34,7 +34,7 @@ use monomorphize::{self, Instance};
use rustc::ty::{self, Ty}; use rustc::ty::{self, Ty};
use rustc::mir; use rustc::mir;
use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo}; use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
use util::nodemap::{DefIdMap, FnvHashMap, FnvHashSet}; use util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
use libc::c_uint; use libc::c_uint;
use std::cell::{Cell, RefCell}; use std::cell::{Cell, RefCell};
@ -68,15 +68,15 @@ pub struct CrateDebugContext<'tcx> {
llcontext: ContextRef, llcontext: ContextRef,
builder: DIBuilderRef, builder: DIBuilderRef,
current_debug_location: Cell<InternalDebugLocation>, current_debug_location: Cell<InternalDebugLocation>,
created_files: RefCell<FnvHashMap<String, DIFile>>, created_files: RefCell<FxHashMap<String, DIFile>>,
created_enum_disr_types: RefCell<FnvHashMap<(DefId, layout::Integer), DIType>>, created_enum_disr_types: RefCell<FxHashMap<(DefId, layout::Integer), DIType>>,
type_map: RefCell<TypeMap<'tcx>>, type_map: RefCell<TypeMap<'tcx>>,
namespace_map: RefCell<DefIdMap<DIScope>>, namespace_map: RefCell<DefIdMap<DIScope>>,
// This collection is used to assert that composite types (structs, enums, // This collection is used to assert that composite types (structs, enums,
// ...) have their members only set once: // ...) have their members only set once:
composite_types_completed: RefCell<FnvHashSet<DIType>>, composite_types_completed: RefCell<FxHashSet<DIType>>,
} }
impl<'tcx> CrateDebugContext<'tcx> { impl<'tcx> CrateDebugContext<'tcx> {
@ -89,11 +89,11 @@ impl<'tcx> CrateDebugContext<'tcx> {
llcontext: llcontext, llcontext: llcontext,
builder: builder, builder: builder,
current_debug_location: Cell::new(InternalDebugLocation::UnknownLocation), current_debug_location: Cell::new(InternalDebugLocation::UnknownLocation),
created_files: RefCell::new(FnvHashMap()), created_files: RefCell::new(FxHashMap()),
created_enum_disr_types: RefCell::new(FnvHashMap()), created_enum_disr_types: RefCell::new(FxHashMap()),
type_map: RefCell::new(TypeMap::new()), type_map: RefCell::new(TypeMap::new()),
namespace_map: RefCell::new(DefIdMap()), namespace_map: RefCell::new(DefIdMap()),
composite_types_completed: RefCell::new(FnvHashSet()), composite_types_completed: RefCell::new(FxHashSet()),
}; };
} }
} }

View file

@ -29,7 +29,7 @@ use type_of;
use glue; use glue;
use type_::Type; use type_::Type;
use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::fx::FxHashMap;
use syntax::parse::token; use syntax::parse::token;
use super::{MirContext, LocalRef}; use super::{MirContext, LocalRef};
@ -144,7 +144,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
adt::trans_get_discr(bcx, ty, discr_lvalue.llval, None, true) adt::trans_get_discr(bcx, ty, discr_lvalue.llval, None, true)
); );
let mut bb_hist = FnvHashMap(); let mut bb_hist = FxHashMap();
for target in targets { for target in targets {
*bb_hist.entry(target).or_insert(0) += 1; *bb_hist.entry(target).or_insert(0) += 1;
} }

View file

@ -134,7 +134,7 @@ use symbol_map::SymbolMap;
use syntax::ast::NodeId; use syntax::ast::NodeId;
use syntax::parse::token::{self, InternedString}; use syntax::parse::token::{self, InternedString};
use trans_item::TransItem; use trans_item::TransItem;
use util::nodemap::{FnvHashMap, FnvHashSet}; use util::nodemap::{FxHashMap, FxHashSet};
pub enum PartitioningStrategy { pub enum PartitioningStrategy {
/// Generate one codegen unit per source-level module. /// Generate one codegen unit per source-level module.
@ -151,12 +151,12 @@ pub struct CodegenUnit<'tcx> {
/// as well as the crate name and disambiguator. /// as well as the crate name and disambiguator.
name: InternedString, name: InternedString,
items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>, items: FxHashMap<TransItem<'tcx>, llvm::Linkage>,
} }
impl<'tcx> CodegenUnit<'tcx> { impl<'tcx> CodegenUnit<'tcx> {
pub fn new(name: InternedString, pub fn new(name: InternedString,
items: FnvHashMap<TransItem<'tcx>, llvm::Linkage>) items: FxHashMap<TransItem<'tcx>, llvm::Linkage>)
-> Self { -> Self {
CodegenUnit { CodegenUnit {
name: name, name: name,
@ -165,7 +165,7 @@ impl<'tcx> CodegenUnit<'tcx> {
} }
pub fn empty(name: InternedString) -> Self { pub fn empty(name: InternedString) -> Self {
Self::new(name, FnvHashMap()) Self::new(name, FxHashMap())
} }
pub fn contains_item(&self, item: &TransItem<'tcx>) -> bool { pub fn contains_item(&self, item: &TransItem<'tcx>) -> bool {
@ -176,7 +176,7 @@ impl<'tcx> CodegenUnit<'tcx> {
&self.name &self.name
} }
pub fn items(&self) -> &FnvHashMap<TransItem<'tcx>, llvm::Linkage> { pub fn items(&self) -> &FxHashMap<TransItem<'tcx>, llvm::Linkage> {
&self.items &self.items
} }
@ -297,7 +297,7 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
struct PreInliningPartitioning<'tcx> { struct PreInliningPartitioning<'tcx> {
codegen_units: Vec<CodegenUnit<'tcx>>, codegen_units: Vec<CodegenUnit<'tcx>>,
roots: FnvHashSet<TransItem<'tcx>>, roots: FxHashSet<TransItem<'tcx>>,
} }
struct PostInliningPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>); struct PostInliningPartitioning<'tcx>(Vec<CodegenUnit<'tcx>>);
@ -308,8 +308,8 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
where I: Iterator<Item = TransItem<'tcx>> where I: Iterator<Item = TransItem<'tcx>>
{ {
let tcx = scx.tcx(); let tcx = scx.tcx();
let mut roots = FnvHashSet(); let mut roots = FxHashSet();
let mut codegen_units = FnvHashMap(); let mut codegen_units = FxHashMap();
for trans_item in trans_items { for trans_item in trans_items {
let is_root = !trans_item.is_instantiated_only_on_demand(tcx); let is_root = !trans_item.is_instantiated_only_on_demand(tcx);
@ -419,7 +419,7 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
for codegen_unit in &initial_partitioning.codegen_units[..] { for codegen_unit in &initial_partitioning.codegen_units[..] {
// Collect all items that need to be available in this codegen unit // Collect all items that need to be available in this codegen unit
let mut reachable = FnvHashSet(); let mut reachable = FxHashSet();
for root in codegen_unit.items.keys() { for root in codegen_unit.items.keys() {
follow_inlining(*root, inlining_map, &mut reachable); follow_inlining(*root, inlining_map, &mut reachable);
} }
@ -465,7 +465,7 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit
fn follow_inlining<'tcx>(trans_item: TransItem<'tcx>, fn follow_inlining<'tcx>(trans_item: TransItem<'tcx>,
inlining_map: &InliningMap<'tcx>, inlining_map: &InliningMap<'tcx>,
visited: &mut FnvHashSet<TransItem<'tcx>>) { visited: &mut FxHashSet<TransItem<'tcx>>) {
if !visited.insert(trans_item) { if !visited.insert(trans_item) {
return; return;
} }

View file

@ -14,7 +14,7 @@ use rustc::ty::TyCtxt;
use std::borrow::Cow; use std::borrow::Cow;
use syntax::codemap::Span; use syntax::codemap::Span;
use trans_item::TransItem; use trans_item::TransItem;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
// In the SymbolMap we collect the symbol names of all translation items of // In the SymbolMap we collect the symbol names of all translation items of
// the current crate. This map exists as a performance optimization. Symbol // the current crate. This map exists as a performance optimization. Symbol
@ -22,7 +22,7 @@ use util::nodemap::FnvHashMap;
// Thus they could also always be recomputed if needed. // Thus they could also always be recomputed if needed.
pub struct SymbolMap<'tcx> { pub struct SymbolMap<'tcx> {
index: FnvHashMap<TransItem<'tcx>, (usize, usize)>, index: FxHashMap<TransItem<'tcx>, (usize, usize)>,
arena: String, arena: String,
} }
@ -78,7 +78,7 @@ impl<'tcx> SymbolMap<'tcx> {
} }
let mut symbol_map = SymbolMap { let mut symbol_map = SymbolMap {
index: FnvHashMap(), index: FxHashMap(),
arena: String::with_capacity(1024), arena: String::with_capacity(1024),
}; };

View file

@ -15,7 +15,7 @@ use llvm::{TypeRef, Bool, False, True, TypeKind};
use llvm::{Float, Double, X86_FP80, PPC_FP128, FP128}; use llvm::{Float, Double, X86_FP80, PPC_FP128, FP128};
use context::CrateContext; use context::CrateContext;
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use syntax::ast; use syntax::ast;
use rustc::ty::layout; use rustc::ty::layout;
@ -325,13 +325,13 @@ impl Type {
/* Memory-managed object interface to type handles. */ /* Memory-managed object interface to type handles. */
pub struct TypeNames { pub struct TypeNames {
named_types: RefCell<FnvHashMap<String, TypeRef>>, named_types: RefCell<FxHashMap<String, TypeRef>>,
} }
impl TypeNames { impl TypeNames {
pub fn new() -> TypeNames { pub fn new() -> TypeNames {
TypeNames { TypeNames {
named_types: RefCell::new(FnvHashMap()) named_types: RefCell::new(FxHashMap())
} }
} }

View file

@ -66,7 +66,7 @@ use rscope::{self, UnelidableRscope, RegionScope, ElidableRscope,
ElisionFailureInfo, ElidedLifetime}; ElisionFailureInfo, ElidedLifetime};
use rscope::{AnonTypeScope, MaybeWithAnonTypes}; use rscope::{AnonTypeScope, MaybeWithAnonTypes};
use util::common::{ErrorReported, FN_OUTPUT_NAME}; use util::common::{ErrorReported, FN_OUTPUT_NAME};
use util::nodemap::{NodeMap, FnvHashSet}; use util::nodemap::{NodeMap, FxHashSet};
use std::cell::RefCell; use std::cell::RefCell;
use syntax::{abi, ast}; use syntax::{abi, ast};
@ -569,7 +569,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
let mut possible_implied_output_region = None; let mut possible_implied_output_region = None;
for input_type in input_tys.iter() { for input_type in input_tys.iter() {
let mut regions = FnvHashSet(); let mut regions = FxHashSet();
let have_bound_regions = tcx.collect_regions(input_type, &mut regions); let have_bound_regions = tcx.collect_regions(input_type, &mut regions);
debug!("find_implied_output_regions: collected {:?} from {:?} \ debug!("find_implied_output_regions: collected {:?} from {:?} \
@ -1142,7 +1142,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
return tcx.types.err; return tcx.types.err;
} }
let mut associated_types = FnvHashSet::default(); let mut associated_types = FxHashSet::default();
for tr in traits::supertraits(tcx, principal) { for tr in traits::supertraits(tcx, principal) {
if let Some(trait_id) = tcx.map.as_local_node_id(tr.def_id()) { if let Some(trait_id) = tcx.map.as_local_node_id(tr.def_id()) {
use collect::trait_associated_type_names; use collect::trait_associated_type_names;

View file

@ -14,7 +14,7 @@ use rustc::hir::pat_util::EnumerateAndAdjustIterator;
use rustc::infer::{self, InferOk, TypeOrigin}; use rustc::infer::{self, InferOk, TypeOrigin};
use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference}; use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference};
use check::{FnCtxt, Expectation}; use check::{FnCtxt, Expectation};
use util::nodemap::FnvHashMap; use util::nodemap::FxHashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::cmp; use std::cmp;
@ -633,10 +633,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
let field_map = variant.fields let field_map = variant.fields
.iter() .iter()
.map(|field| (field.name, field)) .map(|field| (field.name, field))
.collect::<FnvHashMap<_, _>>(); .collect::<FxHashMap<_, _>>();
// Keep track of which fields have already appeared in the pattern. // Keep track of which fields have already appeared in the pattern.
let mut used_fields = FnvHashMap(); let mut used_fields = FxHashMap();
// Typecheck each field. // Typecheck each field.
for &Spanned { node: ref field, span } in fields { for &Spanned { node: ref field, span } in fields {

View file

@ -18,7 +18,7 @@ use middle::region;
use rustc::ty::subst::{Subst, Substs}; use rustc::ty::subst::{Subst, Substs};
use rustc::ty::{self, AdtKind, Ty, TyCtxt}; use rustc::ty::{self, AdtKind, Ty, TyCtxt};
use rustc::traits::{self, Reveal}; use rustc::traits::{self, Reveal};
use util::nodemap::FnvHashSet; use util::nodemap::FxHashSet;
use syntax::ast; use syntax::ast;
use syntax_pos::{self, Span}; use syntax_pos::{self, Span};
@ -289,7 +289,7 @@ pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>(
rcx: rcx, rcx: rcx,
span: span, span: span,
parent_scope: parent_scope, parent_scope: parent_scope,
breadcrumbs: FnvHashSet() breadcrumbs: FxHashSet()
}, },
TypeContext::Root, TypeContext::Root,
typ, typ,
@ -347,7 +347,7 @@ enum TypeContext {
struct DropckContext<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> { struct DropckContext<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> {
rcx: &'a mut RegionCtxt<'b, 'gcx, 'tcx>, rcx: &'a mut RegionCtxt<'b, 'gcx, 'tcx>,
/// types that have already been traversed /// types that have already been traversed
breadcrumbs: FnvHashSet<Ty<'tcx>>, breadcrumbs: FxHashSet<Ty<'tcx>>,
/// span for error reporting /// span for error reporting
span: Span, span: Span,
/// the scope reachable dtorck types must outlive /// the scope reachable dtorck types must outlive

View file

@ -16,7 +16,7 @@ use rustc::infer::TypeOrigin;
use rustc::ty::subst::Substs; use rustc::ty::subst::Substs;
use rustc::ty::FnSig; use rustc::ty::FnSig;
use rustc::ty::{self, Ty}; use rustc::ty::{self, Ty};
use rustc::util::nodemap::FnvHashMap; use rustc::util::nodemap::FxHashMap;
use {CrateCtxt, require_same_types}; use {CrateCtxt, require_same_types};
use syntax::abi::Abi; use syntax::abi::Abi;
@ -372,7 +372,7 @@ pub fn check_platform_intrinsic_type(ccx: &CrateCtxt,
return return
} }
let mut structural_to_nomimal = FnvHashMap(); let mut structural_to_nomimal = FxHashMap();
let sig = tcx.no_late_bound_regions(i_ty.ty.fn_sig()).unwrap(); let sig = tcx.no_late_bound_regions(i_ty.ty.fn_sig()).unwrap();
if intr.inputs.len() != sig.inputs.len() { if intr.inputs.len() != sig.inputs.len() {
@ -412,7 +412,7 @@ fn match_intrinsic_type_to_type<'tcx, 'a>(
ccx: &CrateCtxt<'a, 'tcx>, ccx: &CrateCtxt<'a, 'tcx>,
position: &str, position: &str,
span: Span, span: Span,
structural_to_nominal: &mut FnvHashMap<&'a intrinsics::Type, ty::Ty<'tcx>>, structural_to_nominal: &mut FxHashMap<&'a intrinsics::Type, ty::Ty<'tcx>>,
expected: &'a intrinsics::Type, t: ty::Ty<'tcx>) expected: &'a intrinsics::Type, t: ty::Ty<'tcx>)
{ {
use intrinsics::Type::*; use intrinsics::Type::*;

View file

@ -20,7 +20,7 @@ use rustc::ty::subst::{Subst, Substs};
use rustc::traits; use rustc::traits;
use rustc::ty::{self, Ty, ToPolyTraitRef, TraitRef, TypeFoldable}; use rustc::ty::{self, Ty, ToPolyTraitRef, TraitRef, TypeFoldable};
use rustc::infer::{InferOk, TypeOrigin}; use rustc::infer::{InferOk, TypeOrigin};
use rustc::util::nodemap::FnvHashSet; use rustc::util::nodemap::FxHashSet;
use syntax::ast; use syntax::ast;
use syntax_pos::{Span, DUMMY_SP}; use syntax_pos::{Span, DUMMY_SP};
use rustc::hir; use rustc::hir;
@ -40,7 +40,7 @@ struct ProbeContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
opt_simplified_steps: Option<Vec<ty::fast_reject::SimplifiedType>>, opt_simplified_steps: Option<Vec<ty::fast_reject::SimplifiedType>>,
inherent_candidates: Vec<Candidate<'tcx>>, inherent_candidates: Vec<Candidate<'tcx>>,
extension_candidates: Vec<Candidate<'tcx>>, extension_candidates: Vec<Candidate<'tcx>>,
impl_dups: FnvHashSet<DefId>, impl_dups: FxHashSet<DefId>,
import_id: Option<ast::NodeId>, import_id: Option<ast::NodeId>,
/// Collects near misses when the candidate functions are missing a `self` keyword and is only /// Collects near misses when the candidate functions are missing a `self` keyword and is only
@ -263,7 +263,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
item_name: item_name, item_name: item_name,
inherent_candidates: Vec::new(), inherent_candidates: Vec::new(),
extension_candidates: Vec::new(), extension_candidates: Vec::new(),
impl_dups: FnvHashSet(), impl_dups: FxHashSet(),
import_id: None, import_id: None,
steps: Rc::new(steps), steps: Rc::new(steps),
opt_simplified_steps: opt_simplified_steps, opt_simplified_steps: opt_simplified_steps,
@ -568,7 +568,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
fn assemble_extension_candidates_for_traits_in_scope(&mut self, fn assemble_extension_candidates_for_traits_in_scope(&mut self,
expr_id: ast::NodeId) expr_id: ast::NodeId)
-> Result<(), MethodError<'tcx>> { -> Result<(), MethodError<'tcx>> {
let mut duplicates = FnvHashSet(); let mut duplicates = FxHashSet();
let opt_applicable_traits = self.tcx.trait_map.get(&expr_id); let opt_applicable_traits = self.tcx.trait_map.get(&expr_id);
if let Some(applicable_traits) = opt_applicable_traits { if let Some(applicable_traits) = opt_applicable_traits {
for trait_candidate in applicable_traits { for trait_candidate in applicable_traits {
@ -585,7 +585,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
} }
fn assemble_extension_candidates_for_all_traits(&mut self) -> Result<(), MethodError<'tcx>> { fn assemble_extension_candidates_for_all_traits(&mut self) -> Result<(), MethodError<'tcx>> {
let mut duplicates = FnvHashSet(); let mut duplicates = FxHashSet();
for trait_info in suggest::all_traits(self.ccx) { for trait_info in suggest::all_traits(self.ccx) {
if duplicates.insert(trait_info.def_id) { if duplicates.insert(trait_info.def_id) {
self.assemble_extension_candidates_for_trait(trait_info.def_id)?; self.assemble_extension_candidates_for_trait(trait_info.def_id)?;

View file

@ -20,7 +20,7 @@ use hir::def::Def;
use hir::def_id::{CRATE_DEF_INDEX, DefId}; use hir::def_id::{CRATE_DEF_INDEX, DefId};
use middle::lang_items::FnOnceTraitLangItem; use middle::lang_items::FnOnceTraitLangItem;
use rustc::traits::{Obligation, SelectionContext}; use rustc::traits::{Obligation, SelectionContext};
use util::nodemap::FnvHashSet; use util::nodemap::FxHashSet;
use syntax::ast; use syntax::ast;
use errors::DiagnosticBuilder; use errors::DiagnosticBuilder;
@ -470,10 +470,10 @@ pub fn all_traits<'a>(ccx: &'a CrateCtxt) -> AllTraits<'a> {
}); });
// Cross-crate: // Cross-crate:
let mut external_mods = FnvHashSet(); let mut external_mods = FxHashSet();
fn handle_external_def(ccx: &CrateCtxt, fn handle_external_def(ccx: &CrateCtxt,
traits: &mut AllTraitsVec, traits: &mut AllTraitsVec,
external_mods: &mut FnvHashSet<DefId>, external_mods: &mut FxHashSet<DefId>,
def: Def) { def: Def) {
let def_id = def.def_id(); let def_id = def.def_id();
match def { match def {

View file

@ -103,7 +103,7 @@ use CrateCtxt;
use TypeAndSubsts; use TypeAndSubsts;
use lint; use lint;
use util::common::{block_query, ErrorReported, indenter, loop_query}; use util::common::{block_query, ErrorReported, indenter, loop_query};
use util::nodemap::{DefIdMap, FnvHashMap, FnvHashSet, NodeMap}; use util::nodemap::{DefIdMap, FxHashMap, FxHashSet, NodeMap};
use std::cell::{Cell, Ref, RefCell}; use std::cell::{Cell, Ref, RefCell};
use std::mem::replace; use std::mem::replace;
@ -1975,13 +1975,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// We must collect the defaults *before* we do any unification. Because we have // We must collect the defaults *before* we do any unification. Because we have
// directly attached defaults to the type variables any unification that occurs // directly attached defaults to the type variables any unification that occurs
// will erase defaults causing conflicting defaults to be completely ignored. // will erase defaults causing conflicting defaults to be completely ignored.
let default_map: FnvHashMap<_, _> = let default_map: FxHashMap<_, _> =
unsolved_variables unsolved_variables
.iter() .iter()
.filter_map(|t| self.default(t).map(|d| (t, d))) .filter_map(|t| self.default(t).map(|d| (t, d)))
.collect(); .collect();
let mut unbound_tyvars = FnvHashSet(); let mut unbound_tyvars = FxHashSet();
debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map); debug!("select_all_obligations_and_apply_defaults: defaults={:?}", default_map);
@ -2129,8 +2129,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// table then apply defaults until we find a conflict. That default must be the one // table then apply defaults until we find a conflict. That default must be the one
// that caused conflict earlier. // that caused conflict earlier.
fn find_conflicting_default(&self, fn find_conflicting_default(&self,
unbound_vars: &FnvHashSet<Ty<'tcx>>, unbound_vars: &FxHashSet<Ty<'tcx>>,
default_map: &FnvHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>, default_map: &FxHashMap<&Ty<'tcx>, type_variable::Default<'tcx>>,
conflict: Ty<'tcx>) conflict: Ty<'tcx>)
-> Option<type_variable::Default<'tcx>> { -> Option<type_variable::Default<'tcx>> {
use rustc::ty::error::UnconstrainedNumeric::Neither; use rustc::ty::error::UnconstrainedNumeric::Neither;
@ -3123,12 +3123,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
_ => span_bug!(span, "non-ADT passed to check_expr_struct_fields") _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields")
}; };
let mut remaining_fields = FnvHashMap(); let mut remaining_fields = FxHashMap();
for field in &variant.fields { for field in &variant.fields {
remaining_fields.insert(field.name, field); remaining_fields.insert(field.name, field);
} }
let mut seen_fields = FnvHashMap(); let mut seen_fields = FxHashMap();
let mut error_happened = false; let mut error_happened = false;

View file

@ -16,7 +16,7 @@ use middle::region::{CodeExtent};
use rustc::infer::TypeOrigin; use rustc::infer::TypeOrigin;
use rustc::traits; use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::{self, Ty, TyCtxt};
use rustc::util::nodemap::{FnvHashSet, FnvHashMap}; use rustc::util::nodemap::{FxHashSet, FxHashMap};
use syntax::ast; use syntax::ast;
use syntax_pos::Span; use syntax_pos::Span;
@ -529,7 +529,7 @@ impl<'ccx, 'gcx> CheckTypeWellFormedVisitor<'ccx, 'gcx> {
assert_eq!(ty_predicates.parent, None); assert_eq!(ty_predicates.parent, None);
let variances = self.tcx().item_variances(item_def_id); let variances = self.tcx().item_variances(item_def_id);
let mut constrained_parameters: FnvHashSet<_> = let mut constrained_parameters: FxHashSet<_> =
variances.iter().enumerate() variances.iter().enumerate()
.filter(|&(_, &variance)| variance != ty::Bivariant) .filter(|&(_, &variance)| variance != ty::Bivariant)
.map(|(index, _)| Parameter(index as u32)) .map(|(index, _)| Parameter(index as u32))
@ -580,10 +580,10 @@ impl<'ccx, 'gcx> CheckTypeWellFormedVisitor<'ccx, 'gcx> {
fn reject_shadowing_type_parameters(tcx: TyCtxt, span: Span, generics: &ty::Generics) { fn reject_shadowing_type_parameters(tcx: TyCtxt, span: Span, generics: &ty::Generics) {
let parent = tcx.lookup_generics(generics.parent.unwrap()); let parent = tcx.lookup_generics(generics.parent.unwrap());
let impl_params: FnvHashMap<_, _> = parent.types let impl_params: FxHashMap<_, _> = parent.types
.iter() .iter()
.map(|tp| (tp.name, tp.def_id)) .map(|tp| (tp.name, tp.def_id))
.collect(); .collect();
for method_param in &generics.types { for method_param in &generics.types {
if impl_params.contains_key(&method_param.name) { if impl_params.contains_key(&method_param.name) {

View file

@ -72,7 +72,7 @@ use rustc::ty::util::IntTypeExt;
use rscope::*; use rscope::*;
use rustc::dep_graph::DepNode; use rustc::dep_graph::DepNode;
use util::common::{ErrorReported, MemoizationMap}; use util::common::{ErrorReported, MemoizationMap};
use util::nodemap::{NodeMap, FnvHashMap, FnvHashSet}; use util::nodemap::{NodeMap, FxHashMap, FxHashSet};
use {CrateCtxt, write_ty_to_tcx}; use {CrateCtxt, write_ty_to_tcx};
use rustc_const_math::ConstInt; use rustc_const_math::ConstInt;
@ -786,8 +786,8 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) {
// Convert all the associated consts. // Convert all the associated consts.
// Also, check if there are any duplicate associated items // Also, check if there are any duplicate associated items
let mut seen_type_items = FnvHashMap(); let mut seen_type_items = FxHashMap();
let mut seen_value_items = FnvHashMap(); let mut seen_value_items = FxHashMap();
for impl_item in impl_items { for impl_item in impl_items {
let seen_items = match impl_item.node { let seen_items = match impl_item.node {
@ -1038,7 +1038,7 @@ fn convert_struct_variant<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
disr_val: ty::Disr, disr_val: ty::Disr,
def: &hir::VariantData) def: &hir::VariantData)
-> ty::VariantDefData<'tcx, 'tcx> { -> ty::VariantDefData<'tcx, 'tcx> {
let mut seen_fields: FnvHashMap<ast::Name, Span> = FnvHashMap(); let mut seen_fields: FxHashMap<ast::Name, Span> = FxHashMap();
let node_id = ccx.tcx.map.as_local_node_id(did).unwrap(); let node_id = ccx.tcx.map.as_local_node_id(did).unwrap();
let fields = def.fields().iter().map(|f| { let fields = def.fields().iter().map(|f| {
let fid = ccx.tcx.map.local_def_id(f.id); let fid = ccx.tcx.map.local_def_id(f.id);
@ -1952,9 +1952,9 @@ fn compute_object_lifetime_default<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
{ {
let inline_bounds = from_bounds(ccx, param_bounds); let inline_bounds = from_bounds(ccx, param_bounds);
let where_bounds = from_predicates(ccx, param_id, &where_clause.predicates); let where_bounds = from_predicates(ccx, param_id, &where_clause.predicates);
let all_bounds: FnvHashSet<_> = inline_bounds.into_iter() let all_bounds: FxHashSet<_> = inline_bounds.into_iter()
.chain(where_bounds) .chain(where_bounds)
.collect(); .collect();
return if all_bounds.len() > 1 { return if all_bounds.len() > 1 {
ty::ObjectLifetimeDefault::Ambiguous ty::ObjectLifetimeDefault::Ambiguous
} else if all_bounds.len() == 0 { } else if all_bounds.len() == 0 {
@ -2171,7 +2171,7 @@ fn enforce_impl_params_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
// The trait reference is an input, so find all type parameters // The trait reference is an input, so find all type parameters
// reachable from there, to start (if this is an inherent impl, // reachable from there, to start (if this is an inherent impl,
// then just examine the self type). // then just examine the self type).
let mut input_parameters: FnvHashSet<_> = let mut input_parameters: FxHashSet<_> =
ctp::parameters_for(&impl_scheme.ty, false).into_iter().collect(); ctp::parameters_for(&impl_scheme.ty, false).into_iter().collect();
if let Some(ref trait_ref) = impl_trait_ref { if let Some(ref trait_ref) = impl_trait_ref {
input_parameters.extend(ctp::parameters_for(trait_ref, false)); input_parameters.extend(ctp::parameters_for(trait_ref, false));
@ -2200,7 +2200,7 @@ fn enforce_impl_lifetimes_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
let impl_predicates = ccx.tcx.lookup_predicates(impl_def_id); let impl_predicates = ccx.tcx.lookup_predicates(impl_def_id);
let impl_trait_ref = ccx.tcx.impl_trait_ref(impl_def_id); let impl_trait_ref = ccx.tcx.impl_trait_ref(impl_def_id);
let mut input_parameters: FnvHashSet<_> = let mut input_parameters: FxHashSet<_> =
ctp::parameters_for(&impl_scheme.ty, false).into_iter().collect(); ctp::parameters_for(&impl_scheme.ty, false).into_iter().collect();
if let Some(ref trait_ref) = impl_trait_ref { if let Some(ref trait_ref) = impl_trait_ref {
input_parameters.extend(ctp::parameters_for(trait_ref, false)); input_parameters.extend(ctp::parameters_for(trait_ref, false));
@ -2208,7 +2208,7 @@ fn enforce_impl_lifetimes_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
ctp::identify_constrained_type_params( ctp::identify_constrained_type_params(
&impl_predicates.predicates.as_slice(), impl_trait_ref, &mut input_parameters); &impl_predicates.predicates.as_slice(), impl_trait_ref, &mut input_parameters);
let lifetimes_in_associated_types: FnvHashSet<_> = impl_items.iter() let lifetimes_in_associated_types: FxHashSet<_> = impl_items.iter()
.map(|item| ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(item.id))) .map(|item| ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(item.id)))
.filter_map(|item| match item { .filter_map(|item| match item {
ty::TypeTraitItem(ref assoc_ty) => assoc_ty.ty, ty::TypeTraitItem(ref assoc_ty) => assoc_ty.ty,

View file

@ -10,7 +10,7 @@
use rustc::ty::{self, Ty}; use rustc::ty::{self, Ty};
use rustc::ty::fold::{TypeFoldable, TypeVisitor}; use rustc::ty::fold::{TypeFoldable, TypeVisitor};
use rustc::util::nodemap::FnvHashSet; use rustc::util::nodemap::FxHashSet;
#[derive(Clone, PartialEq, Eq, Hash, Debug)] #[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct Parameter(pub u32); pub struct Parameter(pub u32);
@ -76,7 +76,7 @@ impl<'tcx> TypeVisitor<'tcx> for ParameterCollector {
pub fn identify_constrained_type_params<'tcx>(predicates: &[ty::Predicate<'tcx>], pub fn identify_constrained_type_params<'tcx>(predicates: &[ty::Predicate<'tcx>],
impl_trait_ref: Option<ty::TraitRef<'tcx>>, impl_trait_ref: Option<ty::TraitRef<'tcx>>,
input_parameters: &mut FnvHashSet<Parameter>) input_parameters: &mut FxHashSet<Parameter>)
{ {
let mut predicates = predicates.to_owned(); let mut predicates = predicates.to_owned();
setup_constraining_predicates(&mut predicates, impl_trait_ref, input_parameters); setup_constraining_predicates(&mut predicates, impl_trait_ref, input_parameters);
@ -125,7 +125,7 @@ pub fn identify_constrained_type_params<'tcx>(predicates: &[ty::Predicate<'tcx>]
/// think of any. /// think of any.
pub fn setup_constraining_predicates<'tcx>(predicates: &mut [ty::Predicate<'tcx>], pub fn setup_constraining_predicates<'tcx>(predicates: &mut [ty::Predicate<'tcx>],
impl_trait_ref: Option<ty::TraitRef<'tcx>>, impl_trait_ref: Option<ty::TraitRef<'tcx>>,
input_parameters: &mut FnvHashSet<Parameter>) input_parameters: &mut FxHashSet<Parameter>)
{ {
// The canonical way of doing the needed topological sort // The canonical way of doing the needed topological sort
// would be a DFS, but getting the graph and its ownership // would be a DFS, but getting the graph and its ownership

View file

@ -19,7 +19,7 @@ use rustc::hir::def::{Def, CtorKind};
use rustc::hir::def_id::DefId; use rustc::hir::def_id::DefId;
use rustc::hir::print as pprust; use rustc::hir::print as pprust;
use rustc::ty::{self, TyCtxt}; use rustc::ty::{self, TyCtxt};
use rustc::util::nodemap::FnvHashSet; use rustc::util::nodemap::FxHashSet;
use rustc_const_eval::lookup_const_by_id; use rustc_const_eval::lookup_const_by_id;
@ -460,7 +460,7 @@ pub fn build_impl<'a, 'tcx>(cx: &DocContext,
.into_iter() .into_iter()
.map(|meth| meth.name.to_string()) .map(|meth| meth.name.to_string())
.collect() .collect()
}).unwrap_or(FnvHashSet()); }).unwrap_or(FxHashSet());
ret.push(clean::Item { ret.push(clean::Item {
inner: clean::ImplItem(clean::Impl { inner: clean::ImplItem(clean::Impl {
@ -496,7 +496,7 @@ fn build_module<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>,
// If we're reexporting a reexport it may actually reexport something in // If we're reexporting a reexport it may actually reexport something in
// two namespaces, so the target may be listed twice. Make sure we only // two namespaces, so the target may be listed twice. Make sure we only
// visit each node at most once. // visit each node at most once.
let mut visited = FnvHashSet(); let mut visited = FxHashSet();
for item in tcx.sess.cstore.item_children(did) { for item in tcx.sess.cstore.item_children(did) {
let def_id = item.def.def_id(); let def_id = item.def.def_id();
if tcx.sess.cstore.visibility(def_id) == ty::Visibility::Public { if tcx.sess.cstore.visibility(def_id) == ty::Visibility::Public {

View file

@ -38,7 +38,7 @@ use rustc::hir::print as pprust;
use rustc::ty::subst::Substs; use rustc::ty::subst::Substs;
use rustc::ty::{self, AdtKind}; use rustc::ty::{self, AdtKind};
use rustc::middle::stability; use rustc::middle::stability;
use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc::hir; use rustc::hir;
@ -116,7 +116,7 @@ pub struct Crate {
pub access_levels: Arc<AccessLevels<DefId>>, pub access_levels: Arc<AccessLevels<DefId>>,
// These are later on moved into `CACHEKEY`, leaving the map empty. // These are later on moved into `CACHEKEY`, leaving the map empty.
// Only here so that they can be filtered through the rustdoc passes. // Only here so that they can be filtered through the rustdoc passes.
pub external_traits: FnvHashMap<DefId, Trait>, pub external_traits: FxHashMap<DefId, Trait>,
} }
struct CrateNum(def_id::CrateNum); struct CrateNum(def_id::CrateNum);
@ -993,7 +993,7 @@ impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics<'tcx>,
// Note that associated types also have a sized bound by default, but we // Note that associated types also have a sized bound by default, but we
// don't actually know the set of associated types right here so that's // don't actually know the set of associated types right here so that's
// handled in cleaning associated types // handled in cleaning associated types
let mut sized_params = FnvHashSet(); let mut sized_params = FxHashSet();
where_predicates.retain(|pred| { where_predicates.retain(|pred| {
match *pred { match *pred {
WP::BoundPredicate { ty: Generic(ref g), ref bounds } => { WP::BoundPredicate { ty: Generic(ref g), ref bounds } => {
@ -1693,8 +1693,8 @@ impl Clean<Type> for hir::Ty {
}); });
if let Some((tcx, &hir::ItemTy(ref ty, ref generics))) = tcx_and_alias { if let Some((tcx, &hir::ItemTy(ref ty, ref generics))) = tcx_and_alias {
let provided_params = &path.segments.last().unwrap().parameters; let provided_params = &path.segments.last().unwrap().parameters;
let mut ty_substs = FnvHashMap(); let mut ty_substs = FxHashMap();
let mut lt_substs = FnvHashMap(); let mut lt_substs = FxHashMap();
for (i, ty_param) in generics.ty_params.iter().enumerate() { for (i, ty_param) in generics.ty_params.iter().enumerate() {
let ty_param_def = tcx.expect_def(ty_param.id); let ty_param_def = tcx.expect_def(ty_param.id);
if let Some(ty) = provided_params.types().get(i).cloned() if let Some(ty) = provided_params.types().get(i).cloned()
@ -2368,7 +2368,7 @@ impl Clean<ImplPolarity> for hir::ImplPolarity {
pub struct Impl { pub struct Impl {
pub unsafety: hir::Unsafety, pub unsafety: hir::Unsafety,
pub generics: Generics, pub generics: Generics,
pub provided_trait_methods: FnvHashSet<String>, pub provided_trait_methods: FxHashSet<String>,
pub trait_: Option<Type>, pub trait_: Option<Type>,
pub for_: Type, pub for_: Type,
pub items: Vec<Item>, pub items: Vec<Item>,
@ -2394,7 +2394,7 @@ impl Clean<Vec<Item>> for doctree::Impl {
.map(|meth| meth.name.to_string()) .map(|meth| meth.name.to_string())
.collect() .collect()
}) })
}).unwrap_or(FnvHashSet()); }).unwrap_or(FxHashSet());
ret.push(Item { ret.push(Item {
name: None, name: None,

View file

@ -19,7 +19,7 @@ use rustc::middle::privacy::AccessLevels;
use rustc::ty::{self, TyCtxt}; use rustc::ty::{self, TyCtxt};
use rustc::hir::map as hir_map; use rustc::hir::map as hir_map;
use rustc::lint; use rustc::lint;
use rustc::util::nodemap::FnvHashMap; use rustc::util::nodemap::FxHashMap;
use rustc_trans::back::link; use rustc_trans::back::link;
use rustc_resolve as resolve; use rustc_resolve as resolve;
use rustc_metadata::cstore::CStore; use rustc_metadata::cstore::CStore;
@ -48,7 +48,7 @@ pub enum MaybeTyped<'a, 'tcx: 'a> {
NotTyped(&'a session::Session) NotTyped(&'a session::Session)
} }
pub type ExternalPaths = FnvHashMap<DefId, (Vec<String>, clean::TypeKind)>; pub type ExternalPaths = FxHashMap<DefId, (Vec<String>, clean::TypeKind)>;
pub struct DocContext<'a, 'tcx: 'a> { pub struct DocContext<'a, 'tcx: 'a> {
pub map: &'a hir_map::Map<'tcx>, pub map: &'a hir_map::Map<'tcx>,
@ -65,15 +65,15 @@ pub struct DocContext<'a, 'tcx: 'a> {
/// Later on moved into `html::render::CACHE_KEY` /// Later on moved into `html::render::CACHE_KEY`
pub renderinfo: RefCell<RenderInfo>, pub renderinfo: RefCell<RenderInfo>,
/// Later on moved through `clean::Crate` into `html::render::CACHE_KEY` /// Later on moved through `clean::Crate` into `html::render::CACHE_KEY`
pub external_traits: RefCell<FnvHashMap<DefId, clean::Trait>>, pub external_traits: RefCell<FxHashMap<DefId, clean::Trait>>,
// The current set of type and lifetime substitutions, // The current set of type and lifetime substitutions,
// for expanding type aliases at the HIR level: // for expanding type aliases at the HIR level:
/// Table type parameter definition -> substituted type /// Table type parameter definition -> substituted type
pub ty_substs: RefCell<FnvHashMap<Def, clean::Type>>, pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>,
/// Table node id of lifetime parameter definition -> substituted lifetime /// Table node id of lifetime parameter definition -> substituted lifetime
pub lt_substs: RefCell<FnvHashMap<ast::NodeId, clean::Lifetime>>, pub lt_substs: RefCell<FxHashMap<ast::NodeId, clean::Lifetime>>,
} }
impl<'b, 'tcx> DocContext<'b, 'tcx> { impl<'b, 'tcx> DocContext<'b, 'tcx> {
@ -99,8 +99,8 @@ impl<'b, 'tcx> DocContext<'b, 'tcx> {
/// Call the closure with the given parameters set as /// Call the closure with the given parameters set as
/// the substitutions for a type alias' RHS. /// the substitutions for a type alias' RHS.
pub fn enter_alias<F, R>(&self, pub fn enter_alias<F, R>(&self,
ty_substs: FnvHashMap<Def, clean::Type>, ty_substs: FxHashMap<Def, clean::Type>,
lt_substs: FnvHashMap<ast::NodeId, clean::Lifetime>, lt_substs: FxHashMap<ast::NodeId, clean::Lifetime>,
f: F) -> R f: F) -> R
where F: FnOnce() -> R { where F: FnOnce() -> R {
let (old_tys, old_lts) = let (old_tys, old_lts) =

View file

@ -59,7 +59,7 @@ use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE};
use rustc::middle::privacy::AccessLevels; use rustc::middle::privacy::AccessLevels;
use rustc::middle::stability; use rustc::middle::stability;
use rustc::hir; use rustc::hir;
use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; use rustc::util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::flock; use rustc_data_structures::flock;
use clean::{self, Attributes, GetDefId, SelfTy, Mutability}; use clean::{self, Attributes, GetDefId, SelfTy, Mutability};
@ -111,9 +111,9 @@ pub struct SharedContext {
/// `true`. /// `true`.
pub include_sources: bool, pub include_sources: bool,
/// The local file sources we've emitted and their respective url-paths. /// The local file sources we've emitted and their respective url-paths.
pub local_sources: FnvHashMap<PathBuf, String>, pub local_sources: FxHashMap<PathBuf, String>,
/// All the passes that were run on this crate. /// All the passes that were run on this crate.
pub passes: FnvHashSet<String>, pub passes: FxHashSet<String>,
/// The base-URL of the issue tracker for when an item has been tagged with /// The base-URL of the issue tracker for when an item has been tagged with
/// an issue number. /// an issue number.
pub issue_tracker_base_url: Option<String>, pub issue_tracker_base_url: Option<String>,
@ -208,7 +208,7 @@ pub struct Cache {
/// Mapping of typaram ids to the name of the type parameter. This is used /// Mapping of typaram ids to the name of the type parameter. This is used
/// when pretty-printing a type (so pretty printing doesn't have to /// when pretty-printing a type (so pretty printing doesn't have to
/// painfully maintain a context like this) /// painfully maintain a context like this)
pub typarams: FnvHashMap<DefId, String>, pub typarams: FxHashMap<DefId, String>,
/// Maps a type id to all known implementations for that type. This is only /// Maps a type id to all known implementations for that type. This is only
/// recognized for intra-crate `ResolvedPath` types, and is used to print /// recognized for intra-crate `ResolvedPath` types, and is used to print
@ -216,35 +216,35 @@ pub struct Cache {
/// ///
/// The values of the map are a list of implementations and documentation /// The values of the map are a list of implementations and documentation
/// found on that implementation. /// found on that implementation.
pub impls: FnvHashMap<DefId, Vec<Impl>>, pub impls: FxHashMap<DefId, Vec<Impl>>,
/// Maintains a mapping of local crate node ids to the fully qualified name /// Maintains a mapping of local crate node ids to the fully qualified name
/// and "short type description" of that node. This is used when generating /// and "short type description" of that node. This is used when generating
/// URLs when a type is being linked to. External paths are not located in /// URLs when a type is being linked to. External paths are not located in
/// this map because the `External` type itself has all the information /// this map because the `External` type itself has all the information
/// necessary. /// necessary.
pub paths: FnvHashMap<DefId, (Vec<String>, ItemType)>, pub paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
/// Similar to `paths`, but only holds external paths. This is only used for /// Similar to `paths`, but only holds external paths. This is only used for
/// generating explicit hyperlinks to other crates. /// generating explicit hyperlinks to other crates.
pub external_paths: FnvHashMap<DefId, (Vec<String>, ItemType)>, pub external_paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
/// This map contains information about all known traits of this crate. /// This map contains information about all known traits of this crate.
/// Implementations of a crate should inherit the documentation of the /// Implementations of a crate should inherit the documentation of the
/// parent trait if no extra documentation is specified, and default methods /// parent trait if no extra documentation is specified, and default methods
/// should show up in documentation about trait implementations. /// should show up in documentation about trait implementations.
pub traits: FnvHashMap<DefId, clean::Trait>, pub traits: FxHashMap<DefId, clean::Trait>,
/// When rendering traits, it's often useful to be able to list all /// When rendering traits, it's often useful to be able to list all
/// implementors of the trait, and this mapping is exactly, that: a mapping /// implementors of the trait, and this mapping is exactly, that: a mapping
/// of trait ids to the list of known implementors of the trait /// of trait ids to the list of known implementors of the trait
pub implementors: FnvHashMap<DefId, Vec<Implementor>>, pub implementors: FxHashMap<DefId, Vec<Implementor>>,
/// Cache of where external crate documentation can be found. /// Cache of where external crate documentation can be found.
pub extern_locations: FnvHashMap<CrateNum, (String, ExternalLocation)>, pub extern_locations: FxHashMap<CrateNum, (String, ExternalLocation)>,
/// Cache of where documentation for primitives can be found. /// Cache of where documentation for primitives can be found.
pub primitive_locations: FnvHashMap<clean::PrimitiveType, CrateNum>, pub primitive_locations: FxHashMap<clean::PrimitiveType, CrateNum>,
// Note that external items for which `doc(hidden)` applies to are shown as // Note that external items for which `doc(hidden)` applies to are shown as
// non-reachable while local items aren't. This is because we're reusing // non-reachable while local items aren't. This is because we're reusing
@ -257,7 +257,7 @@ pub struct Cache {
parent_stack: Vec<DefId>, parent_stack: Vec<DefId>,
parent_is_trait_impl: bool, parent_is_trait_impl: bool,
search_index: Vec<IndexItem>, search_index: Vec<IndexItem>,
seen_modules: FnvHashSet<DefId>, seen_modules: FxHashSet<DefId>,
seen_mod: bool, seen_mod: bool,
stripped_mod: bool, stripped_mod: bool,
deref_trait_did: Option<DefId>, deref_trait_did: Option<DefId>,
@ -275,9 +275,9 @@ pub struct Cache {
/// Later on moved into `CACHE_KEY`. /// Later on moved into `CACHE_KEY`.
#[derive(Default)] #[derive(Default)]
pub struct RenderInfo { pub struct RenderInfo {
pub inlined: FnvHashSet<DefId>, pub inlined: FxHashSet<DefId>,
pub external_paths: ::core::ExternalPaths, pub external_paths: ::core::ExternalPaths,
pub external_typarams: FnvHashMap<DefId, String>, pub external_typarams: FxHashMap<DefId, String>,
pub deref_trait_did: Option<DefId>, pub deref_trait_did: Option<DefId>,
pub deref_mut_trait_did: Option<DefId>, pub deref_mut_trait_did: Option<DefId>,
} }
@ -376,10 +376,10 @@ impl ToJson for IndexItemFunctionType {
thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default()); thread_local!(static CACHE_KEY: RefCell<Arc<Cache>> = Default::default());
thread_local!(pub static CURRENT_LOCATION_KEY: RefCell<Vec<String>> = thread_local!(pub static CURRENT_LOCATION_KEY: RefCell<Vec<String>> =
RefCell::new(Vec::new())); RefCell::new(Vec::new()));
thread_local!(static USED_ID_MAP: RefCell<FnvHashMap<String, usize>> = thread_local!(static USED_ID_MAP: RefCell<FxHashMap<String, usize>> =
RefCell::new(init_ids())); RefCell::new(init_ids()));
fn init_ids() -> FnvHashMap<String, usize> { fn init_ids() -> FxHashMap<String, usize> {
[ [
"main", "main",
"search", "search",
@ -406,7 +406,7 @@ pub fn reset_ids(embedded: bool) {
*s.borrow_mut() = if embedded { *s.borrow_mut() = if embedded {
init_ids() init_ids()
} else { } else {
FnvHashMap() FxHashMap()
}; };
}); });
} }
@ -431,7 +431,7 @@ pub fn derive_id(candidate: String) -> String {
pub fn run(mut krate: clean::Crate, pub fn run(mut krate: clean::Crate,
external_html: &ExternalHtml, external_html: &ExternalHtml,
dst: PathBuf, dst: PathBuf,
passes: FnvHashSet<String>, passes: FxHashSet<String>,
css_file_extension: Option<PathBuf>, css_file_extension: Option<PathBuf>,
renderinfo: RenderInfo) -> Result<(), Error> { renderinfo: RenderInfo) -> Result<(), Error> {
let src_root = match krate.src.parent() { let src_root = match krate.src.parent() {
@ -442,7 +442,7 @@ pub fn run(mut krate: clean::Crate,
src_root: src_root, src_root: src_root,
passes: passes, passes: passes,
include_sources: true, include_sources: true,
local_sources: FnvHashMap(), local_sources: FxHashMap(),
issue_tracker_base_url: None, issue_tracker_base_url: None,
layout: layout::Layout { layout: layout::Layout {
logo: "".to_string(), logo: "".to_string(),
@ -510,22 +510,22 @@ pub fn run(mut krate: clean::Crate,
.collect(); .collect();
let mut cache = Cache { let mut cache = Cache {
impls: FnvHashMap(), impls: FxHashMap(),
external_paths: external_paths, external_paths: external_paths,
paths: FnvHashMap(), paths: FxHashMap(),
implementors: FnvHashMap(), implementors: FxHashMap(),
stack: Vec::new(), stack: Vec::new(),
parent_stack: Vec::new(), parent_stack: Vec::new(),
search_index: Vec::new(), search_index: Vec::new(),
parent_is_trait_impl: false, parent_is_trait_impl: false,
extern_locations: FnvHashMap(), extern_locations: FxHashMap(),
primitive_locations: FnvHashMap(), primitive_locations: FxHashMap(),
seen_modules: FnvHashSet(), seen_modules: FxHashSet(),
seen_mod: false, seen_mod: false,
stripped_mod: false, stripped_mod: false,
access_levels: krate.access_levels.clone(), access_levels: krate.access_levels.clone(),
orphan_impl_items: Vec::new(), orphan_impl_items: Vec::new(),
traits: mem::replace(&mut krate.external_traits, FnvHashMap()), traits: mem::replace(&mut krate.external_traits, FxHashMap()),
deref_trait_did: deref_trait_did, deref_trait_did: deref_trait_did,
deref_mut_trait_did: deref_mut_trait_did, deref_mut_trait_did: deref_mut_trait_did,
typarams: external_typarams, typarams: external_typarams,
@ -572,7 +572,7 @@ pub fn run(mut krate: clean::Crate,
/// Build the search index from the collected metadata /// Build the search index from the collected metadata
fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String { fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
let mut nodeid_to_pathid = FnvHashMap(); let mut nodeid_to_pathid = FxHashMap();
let mut crate_items = Vec::with_capacity(cache.search_index.len()); let mut crate_items = Vec::with_capacity(cache.search_index.len());
let mut crate_paths = Vec::<Json>::new(); let mut crate_paths = Vec::<Json>::new();
@ -2655,7 +2655,7 @@ fn render_union(w: &mut fmt::Formatter, it: &clean::Item,
#[derive(Copy, Clone)] #[derive(Copy, Clone)]
enum AssocItemLink<'a> { enum AssocItemLink<'a> {
Anchor(Option<&'a str>), Anchor(Option<&'a str>),
GotoSource(DefId, &'a FnvHashSet<String>), GotoSource(DefId, &'a FxHashSet<String>),
} }
impl<'a> AssocItemLink<'a> { impl<'a> AssocItemLink<'a> {

View file

@ -22,7 +22,7 @@ use rustc::hir::map as hir_map;
use rustc::hir::def::Def; use rustc::hir::def::Def;
use rustc::hir::def_id::LOCAL_CRATE; use rustc::hir::def_id::LOCAL_CRATE;
use rustc::middle::privacy::AccessLevel; use rustc::middle::privacy::AccessLevel;
use rustc::util::nodemap::FnvHashSet; use rustc::util::nodemap::FxHashSet;
use rustc::hir; use rustc::hir;
@ -42,14 +42,14 @@ pub struct RustdocVisitor<'a, 'tcx: 'a> {
pub module: Module, pub module: Module,
pub attrs: hir::HirVec<ast::Attribute>, pub attrs: hir::HirVec<ast::Attribute>,
pub cx: &'a core::DocContext<'a, 'tcx>, pub cx: &'a core::DocContext<'a, 'tcx>,
view_item_stack: FnvHashSet<ast::NodeId>, view_item_stack: FxHashSet<ast::NodeId>,
inlining_from_glob: bool, inlining_from_glob: bool,
} }
impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
pub fn new(cx: &'a core::DocContext<'a, 'tcx>) -> RustdocVisitor<'a, 'tcx> { pub fn new(cx: &'a core::DocContext<'a, 'tcx>) -> RustdocVisitor<'a, 'tcx> {
// If the root is reexported, terminate all recursion. // If the root is reexported, terminate all recursion.
let mut stack = FnvHashSet(); let mut stack = FxHashSet();
stack.insert(ast::CRATE_NODE_ID); stack.insert(ast::CRATE_NODE_ID);
RustdocVisitor { RustdocVisitor {
module: Module::new(None), module: Module::new(None),