Rollup merge of #41355 - nikomatsakis:incr-comp-refactor-trans-2, r=eddyb
Refactor trans some more to pave way for incremental compilation Various refactorings paving the way for the newer approach to incremental compilation (And, in particular, to "query-ifying" trans). My partial goal is to remove `SharedCrateContext`; this PR gets about as far as I can easily get before starting to really want the red/green algorithm. r? @eddyb cc @michaelwoerister
This commit is contained in:
commit
e282817186
@ -88,6 +88,7 @@ pub enum DepNode<D: Clone + Debug> {
|
||||
// predicates for an item wind up in `ItemSignature`).
|
||||
AssociatedItems(D),
|
||||
ItemSignature(D),
|
||||
IsForeignItem(D),
|
||||
TypeParamPredicates((D, D)),
|
||||
SizedConstraint(D),
|
||||
AdtDestructor(D),
|
||||
@ -171,6 +172,7 @@ impl<D: Clone + Debug> DepNode<D> {
|
||||
TransCrateItem,
|
||||
AssociatedItems,
|
||||
ItemSignature,
|
||||
IsForeignItem,
|
||||
AssociatedItemDefIds,
|
||||
InherentImpls,
|
||||
TypeckTables,
|
||||
@ -221,6 +223,7 @@ impl<D: Clone + Debug> DepNode<D> {
|
||||
TransInlinedItem(ref d) => op(d).map(TransInlinedItem),
|
||||
AssociatedItems(ref d) => op(d).map(AssociatedItems),
|
||||
ItemSignature(ref d) => op(d).map(ItemSignature),
|
||||
IsForeignItem(ref d) => op(d).map(IsForeignItem),
|
||||
TypeParamPredicates((ref item, ref param)) => {
|
||||
Some(TypeParamPredicates((try_opt!(op(item)), try_opt!(op(param)))))
|
||||
}
|
||||
|
@ -18,6 +18,7 @@
|
||||
use hir::map as hir_map;
|
||||
use hir::def::Def;
|
||||
use hir::def_id::{DefId, CrateNum};
|
||||
use std::rc::Rc;
|
||||
use ty::{self, TyCtxt};
|
||||
use ty::maps::Providers;
|
||||
use middle::privacy;
|
||||
@ -362,11 +363,11 @@ impl<'a, 'tcx: 'a> ItemLikeVisitor<'tcx> for CollectPrivateImplItemsVisitor<'a,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> NodeSet {
|
||||
pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Rc<NodeSet> {
|
||||
ty::queries::reachable_set::get(tcx, DUMMY_SP, LOCAL_CRATE)
|
||||
}
|
||||
|
||||
fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> NodeSet {
|
||||
fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) -> Rc<NodeSet> {
|
||||
debug_assert!(crate_num == LOCAL_CRATE);
|
||||
|
||||
let access_levels = &ty::queries::privacy_access_levels::get(tcx, DUMMY_SP, LOCAL_CRATE);
|
||||
@ -411,7 +412,7 @@ fn reachable_set<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum) ->
|
||||
reachable_context.propagate();
|
||||
|
||||
// Return the set of reachable symbols.
|
||||
reachable_context.reachable_symbols
|
||||
Rc::new(reachable_context.reachable_symbols)
|
||||
}
|
||||
|
||||
pub fn provide(providers: &mut Providers) {
|
||||
|
@ -55,6 +55,7 @@ mod object_safety;
|
||||
mod select;
|
||||
mod specialize;
|
||||
mod structural_impls;
|
||||
pub mod trans;
|
||||
mod util;
|
||||
|
||||
/// An `Obligation` represents some trait reference (e.g. `int:Eq`) for
|
||||
|
212
src/librustc/traits/trans/mod.rs
Normal file
212
src/librustc/traits/trans/mod.rs
Normal file
@ -0,0 +1,212 @@
|
||||
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// This file contains various trait resolution methods used by trans.
|
||||
// They all assume regions can be erased and monomorphic types. It
|
||||
// seems likely that they should eventually be merged into more
|
||||
// general routines.
|
||||
|
||||
use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
|
||||
use hir::def_id::DefId;
|
||||
use infer::TransNormalize;
|
||||
use std::cell::RefCell;
|
||||
use std::marker::PhantomData;
|
||||
use syntax::ast;
|
||||
use syntax_pos::Span;
|
||||
use traits::{FulfillmentContext, Obligation, ObligationCause, Reveal, SelectionContext, Vtable};
|
||||
use ty::{self, Ty, TyCtxt};
|
||||
use ty::subst::{Subst, Substs};
|
||||
use ty::fold::{TypeFoldable, TypeFolder};
|
||||
use util::common::MemoizationMap;
|
||||
|
||||
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
/// Attempts to resolve an obligation to a vtable.. The result is
|
||||
/// a shallow vtable resolution -- meaning that we do not
|
||||
/// (necessarily) resolve all nested obligations on the impl. Note
|
||||
/// that type check should guarantee to us that all nested
|
||||
/// obligations *could be* resolved if we wanted to.
|
||||
pub fn trans_fulfill_obligation(self,
|
||||
span: Span,
|
||||
trait_ref: ty::PolyTraitRef<'tcx>)
|
||||
-> Vtable<'tcx, ()>
|
||||
{
|
||||
// Remove any references to regions; this helps improve caching.
|
||||
let trait_ref = self.erase_regions(&trait_ref);
|
||||
|
||||
self.trans_trait_caches.trait_cache.memoize(trait_ref, || {
|
||||
debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
|
||||
trait_ref, trait_ref.def_id());
|
||||
|
||||
// Do the initial selection for the obligation. This yields the
|
||||
// shallow result we are looking for -- that is, what specific impl.
|
||||
self.infer_ctxt((), Reveal::All).enter(|infcx| {
|
||||
let mut selcx = SelectionContext::new(&infcx);
|
||||
|
||||
let obligation_cause = ObligationCause::misc(span,
|
||||
ast::DUMMY_NODE_ID);
|
||||
let obligation = Obligation::new(obligation_cause,
|
||||
trait_ref.to_poly_trait_predicate());
|
||||
|
||||
let selection = match selcx.select(&obligation) {
|
||||
Ok(Some(selection)) => selection,
|
||||
Ok(None) => {
|
||||
// Ambiguity can happen when monomorphizing during trans
|
||||
// expands to some humongo type that never occurred
|
||||
// statically -- this humongo type can then overflow,
|
||||
// leading to an ambiguous result. So report this as an
|
||||
// overflow bug, since I believe this is the only case
|
||||
// where ambiguity can result.
|
||||
debug!("Encountered ambiguity selecting `{:?}` during trans, \
|
||||
presuming due to overflow",
|
||||
trait_ref);
|
||||
self.sess.span_fatal(span,
|
||||
"reached the recursion limit during monomorphization \
|
||||
(selection ambiguity)");
|
||||
}
|
||||
Err(e) => {
|
||||
span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
|
||||
e, trait_ref)
|
||||
}
|
||||
};
|
||||
|
||||
debug!("fulfill_obligation: selection={:?}", selection);
|
||||
|
||||
// Currently, we use a fulfillment context to completely resolve
|
||||
// all nested obligations. This is because they can inform the
|
||||
// inference of the impl's type parameters.
|
||||
let mut fulfill_cx = FulfillmentContext::new();
|
||||
let vtable = selection.map(|predicate| {
|
||||
debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
|
||||
fulfill_cx.register_predicate_obligation(&infcx, predicate);
|
||||
});
|
||||
let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);
|
||||
|
||||
info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
|
||||
vtable
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/// Monomorphizes a type from the AST by first applying the in-scope
|
||||
/// substitutions and then normalizing any associated types.
|
||||
pub fn trans_apply_param_substs<T>(self,
|
||||
param_substs: &Substs<'tcx>,
|
||||
value: &T)
|
||||
-> T
|
||||
where T: TransNormalize<'tcx>
|
||||
{
|
||||
debug!("apply_param_substs(param_substs={:?}, value={:?})", param_substs, value);
|
||||
let substituted = value.subst(self, param_substs);
|
||||
let substituted = self.erase_regions(&substituted);
|
||||
AssociatedTypeNormalizer::new(self).fold(&substituted)
|
||||
}
|
||||
}
|
||||
|
||||
struct AssociatedTypeNormalizer<'a, 'gcx: 'a> {
|
||||
tcx: TyCtxt<'a, 'gcx, 'gcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'gcx> AssociatedTypeNormalizer<'a, 'gcx> {
|
||||
fn new(tcx: TyCtxt<'a, 'gcx, 'gcx>) -> Self {
|
||||
AssociatedTypeNormalizer { tcx }
|
||||
}
|
||||
|
||||
fn fold<T:TypeFoldable<'gcx>>(&mut self, value: &T) -> T {
|
||||
if !value.has_projection_types() {
|
||||
value.clone()
|
||||
} else {
|
||||
value.fold_with(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'gcx> TypeFolder<'gcx, 'gcx> for AssociatedTypeNormalizer<'a, 'gcx> {
|
||||
fn tcx<'c>(&'c self) -> TyCtxt<'c, 'gcx, 'gcx> {
|
||||
self.tcx
|
||||
}
|
||||
|
||||
fn fold_ty(&mut self, ty: Ty<'gcx>) -> Ty<'gcx> {
|
||||
if !ty.has_projection_types() {
|
||||
ty
|
||||
} else {
|
||||
self.tcx.trans_trait_caches.project_cache.memoize(ty, || {
|
||||
debug!("AssociatedTypeNormalizer: ty={:?}", ty);
|
||||
self.tcx.normalize_associated_type(&ty)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Specializes caches used in trans -- in particular, they assume all
|
||||
/// types are fully monomorphized and that free regions can be erased.
|
||||
pub struct TransTraitCaches<'tcx> {
|
||||
trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>,
|
||||
project_cache: RefCell<DepTrackingMap<ProjectionCache<'tcx>>>,
|
||||
}
|
||||
|
||||
impl<'tcx> TransTraitCaches<'tcx> {
|
||||
pub fn new(graph: DepGraph) -> Self {
|
||||
TransTraitCaches {
|
||||
trait_cache: RefCell::new(DepTrackingMap::new(graph.clone())),
|
||||
project_cache: RefCell::new(DepTrackingMap::new(graph)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Implement DepTrackingMapConfig for `trait_cache`
|
||||
pub struct TraitSelectionCache<'tcx> {
|
||||
data: PhantomData<&'tcx ()>
|
||||
}
|
||||
|
||||
impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> {
|
||||
type Key = ty::PolyTraitRef<'tcx>;
|
||||
type Value = Vtable<'tcx, ()>;
|
||||
fn to_dep_node(key: &ty::PolyTraitRef<'tcx>) -> DepNode<DefId> {
|
||||
key.to_poly_trait_predicate().dep_node()
|
||||
}
|
||||
}
|
||||
|
||||
// # Global Cache
|
||||
|
||||
pub struct ProjectionCache<'gcx> {
|
||||
data: PhantomData<&'gcx ()>
|
||||
}
|
||||
|
||||
impl<'gcx> DepTrackingMapConfig for ProjectionCache<'gcx> {
|
||||
type Key = Ty<'gcx>;
|
||||
type Value = Ty<'gcx>;
|
||||
fn to_dep_node(key: &Self::Key) -> DepNode<DefId> {
|
||||
// Ideally, we'd just put `key` into the dep-node, but we
|
||||
// can't put full types in there. So just collect up all the
|
||||
// def-ids of structs/enums as well as any traits that we
|
||||
// project out of. It doesn't matter so much what we do here,
|
||||
// except that if we are too coarse, we'll create overly
|
||||
// coarse edges between impls and the trans. For example, if
|
||||
// we just used the def-id of things we are projecting out of,
|
||||
// then the key for `<Foo as SomeTrait>::T` and `<Bar as
|
||||
// SomeTrait>::T` would both share a dep-node
|
||||
// (`TraitSelect(SomeTrait)`), and hence the impls for both
|
||||
// `Foo` and `Bar` would be considered inputs. So a change to
|
||||
// `Bar` would affect things that just normalized `Foo`.
|
||||
// Anyway, this heuristic is not ideal, but better than
|
||||
// nothing.
|
||||
let def_ids: Vec<DefId> =
|
||||
key.walk()
|
||||
.filter_map(|t| match t.sty {
|
||||
ty::TyAdt(adt_def, _) => Some(adt_def.did),
|
||||
ty::TyProjection(ref proj) => Some(proj.trait_ref.def_id),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
DepNode::ProjectionCache { def_ids: def_ids }
|
||||
}
|
||||
}
|
||||
|
@ -407,6 +407,8 @@ pub struct GlobalCtxt<'tcx> {
|
||||
|
||||
pub specializes_cache: RefCell<traits::SpecializesCache>,
|
||||
|
||||
pub trans_trait_caches: traits::trans::TransTraitCaches<'tcx>,
|
||||
|
||||
pub dep_graph: DepGraph,
|
||||
|
||||
/// Common types, pre-interned for your convenience.
|
||||
@ -689,6 +691,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
providers[LOCAL_CRATE] = local_providers;
|
||||
tls::enter_global(GlobalCtxt {
|
||||
sess: s,
|
||||
trans_trait_caches: traits::trans::TransTraitCaches::new(dep_graph.clone()),
|
||||
specializes_cache: RefCell::new(traits::SpecializesCache::new()),
|
||||
global_arenas: arenas,
|
||||
global_interners: interners,
|
||||
|
@ -380,6 +380,9 @@ define_maps! { <'tcx>
|
||||
pub adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
|
||||
pub adt_sized_constraint: SizedConstraint(DefId) -> Ty<'tcx>,
|
||||
|
||||
/// True if this is a foreign item (i.e., linked via `extern { ... }`).
|
||||
pub is_foreign_item: IsForeignItem(DefId) -> bool,
|
||||
|
||||
/// Maps from def-id of a type or region parameter to its
|
||||
/// (inferred) variance.
|
||||
pub variances: ItemSignature(DefId) -> Rc<Vec<ty::Variance>>,
|
||||
@ -448,7 +451,7 @@ define_maps! { <'tcx>
|
||||
/// Performs the privacy check and computes "access levels".
|
||||
pub privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
|
||||
|
||||
pub reachable_set: reachability_dep_node(CrateNum) -> NodeSet,
|
||||
pub reachable_set: reachability_dep_node(CrateNum) -> Rc<NodeSet>,
|
||||
|
||||
pub mir_shims: mir_shim(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>
|
||||
}
|
||||
|
@ -114,7 +114,7 @@ mod sty;
|
||||
#[derive(Clone)]
|
||||
pub struct CrateAnalysis {
|
||||
pub access_levels: Rc<AccessLevels>,
|
||||
pub reachable: NodeSet,
|
||||
pub reachable: Rc<NodeSet>,
|
||||
pub name: String,
|
||||
pub glob_map: Option<hir::GlobMap>,
|
||||
}
|
||||
|
@ -810,7 +810,7 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
|
||||
defs: resolver.definitions,
|
||||
analysis: ty::CrateAnalysis {
|
||||
access_levels: Rc::new(AccessLevels::default()),
|
||||
reachable: NodeSet(),
|
||||
reachable: Rc::new(NodeSet()),
|
||||
name: crate_name.to_string(),
|
||||
glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None },
|
||||
},
|
||||
@ -889,9 +889,10 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session,
|
||||
let index = stability::Index::new(&hir_map);
|
||||
|
||||
let mut local_providers = ty::maps::Providers::default();
|
||||
mir::provide(&mut local_providers);
|
||||
rustc_privacy::provide(&mut local_providers);
|
||||
borrowck::provide(&mut local_providers);
|
||||
mir::provide(&mut local_providers);
|
||||
reachable::provide(&mut local_providers);
|
||||
rustc_privacy::provide(&mut local_providers);
|
||||
typeck::provide(&mut local_providers);
|
||||
ty::provide(&mut local_providers);
|
||||
reachable::provide(&mut local_providers);
|
||||
|
@ -111,6 +111,7 @@ provide! { <'tcx> tcx, def_id, cdata
|
||||
closure_kind => { cdata.closure_kind(def_id.index) }
|
||||
closure_type => { cdata.closure_ty(def_id.index, tcx) }
|
||||
inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
|
||||
is_foreign_item => { cdata.is_foreign_item(def_id.index) }
|
||||
}
|
||||
|
||||
impl CrateStore for cstore::CStore {
|
||||
|
@ -53,7 +53,7 @@ impl ExportedSymbols {
|
||||
scx.tcx().hir.local_def_id(node_id)
|
||||
})
|
||||
.map(|def_id| {
|
||||
let name = symbol_for_def_id(scx, def_id, symbol_map);
|
||||
let name = symbol_for_def_id(scx.tcx(), def_id, symbol_map);
|
||||
let export_level = export_level(scx, def_id);
|
||||
debug!("EXPORTED SYMBOL (local): {} ({:?})", name, export_level);
|
||||
(name, export_level)
|
||||
@ -108,7 +108,7 @@ impl ExportedSymbols {
|
||||
.exported_symbols(cnum)
|
||||
.iter()
|
||||
.map(|&def_id| {
|
||||
let name = symbol_name(Instance::mono(scx.tcx(), def_id), scx);
|
||||
let name = symbol_name(Instance::mono(scx.tcx(), def_id), scx.tcx());
|
||||
let export_level = if special_runtime_crate {
|
||||
// We can probably do better here by just ensuring that
|
||||
// it has hidden visibility rather than public
|
||||
@ -214,21 +214,21 @@ pub fn is_below_threshold(level: SymbolExportLevel,
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_for_def_id<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
fn symbol_for_def_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
def_id: DefId,
|
||||
symbol_map: &SymbolMap<'tcx>)
|
||||
-> String {
|
||||
// Just try to look things up in the symbol map. If nothing's there, we
|
||||
// recompute.
|
||||
if let Some(node_id) = scx.tcx().hir.as_local_node_id(def_id) {
|
||||
if let Some(node_id) = tcx.hir.as_local_node_id(def_id) {
|
||||
if let Some(sym) = symbol_map.get(TransItem::Static(node_id)) {
|
||||
return sym.to_owned();
|
||||
}
|
||||
}
|
||||
|
||||
let instance = Instance::mono(scx.tcx(), def_id);
|
||||
let instance = Instance::mono(tcx, def_id);
|
||||
|
||||
symbol_map.get(TransItem::Fn(instance))
|
||||
.map(str::to_owned)
|
||||
.unwrap_or_else(|| symbol_name(instance, scx))
|
||||
.unwrap_or_else(|| symbol_name(instance, tcx))
|
||||
}
|
||||
|
@ -97,13 +97,12 @@
|
||||
//! virtually impossible. Thus, symbol hash generation exclusively relies on
|
||||
//! DefPaths which are much more robust in the face of changes to the code base.
|
||||
|
||||
use common::SharedCrateContext;
|
||||
use monomorphize::Instance;
|
||||
|
||||
use rustc::middle::weak_lang_items;
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::hir::map as hir_map;
|
||||
use rustc::ty::{self, Ty, TypeFoldable};
|
||||
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
|
||||
use rustc::ty::fold::TypeVisitor;
|
||||
use rustc::ty::item_path::{self, ItemPathBuffer, RootMode};
|
||||
use rustc::ty::subst::Substs;
|
||||
@ -113,7 +112,7 @@ use rustc::util::common::record_time;
|
||||
use syntax::attr;
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
|
||||
fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
fn get_symbol_hash<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
||||
// the DefId of the item this name is for
|
||||
def_id: Option<DefId>,
|
||||
@ -130,8 +129,6 @@ fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
-> String {
|
||||
debug!("get_symbol_hash(def_id={:?}, parameters={:?})", def_id, substs);
|
||||
|
||||
let tcx = scx.tcx();
|
||||
|
||||
let mut hasher = ty::util::TypeIdHasher::<u64>::new(tcx);
|
||||
|
||||
record_time(&tcx.sess.perf_stats.symbol_hash_time, || {
|
||||
@ -157,8 +154,8 @@ fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
// in case the same instances is emitted in two crates of the same
|
||||
// project.
|
||||
if substs.types().next().is_some() {
|
||||
hasher.hash(scx.tcx().crate_name.as_str());
|
||||
hasher.hash(scx.sess().local_crate_disambiguator().as_str());
|
||||
hasher.hash(tcx.crate_name.as_str());
|
||||
hasher.hash(tcx.sess.local_crate_disambiguator().as_str());
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -168,37 +165,37 @@ fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
}
|
||||
|
||||
pub fn symbol_name<'a, 'tcx>(instance: Instance<'tcx>,
|
||||
scx: &SharedCrateContext<'a, 'tcx>) -> String {
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
|
||||
let def_id = instance.def_id();
|
||||
let substs = instance.substs;
|
||||
|
||||
debug!("symbol_name(def_id={:?}, substs={:?})",
|
||||
def_id, substs);
|
||||
|
||||
let node_id = scx.tcx().hir.as_local_node_id(def_id);
|
||||
let node_id = tcx.hir.as_local_node_id(def_id);
|
||||
|
||||
if let Some(id) = node_id {
|
||||
if scx.sess().plugin_registrar_fn.get() == Some(id) {
|
||||
if tcx.sess.plugin_registrar_fn.get() == Some(id) {
|
||||
let idx = def_id.index;
|
||||
let disambiguator = scx.sess().local_crate_disambiguator();
|
||||
return scx.sess().generate_plugin_registrar_symbol(disambiguator, idx);
|
||||
let disambiguator = tcx.sess.local_crate_disambiguator();
|
||||
return tcx.sess.generate_plugin_registrar_symbol(disambiguator, idx);
|
||||
}
|
||||
if scx.sess().derive_registrar_fn.get() == Some(id) {
|
||||
if tcx.sess.derive_registrar_fn.get() == Some(id) {
|
||||
let idx = def_id.index;
|
||||
let disambiguator = scx.sess().local_crate_disambiguator();
|
||||
return scx.sess().generate_derive_registrar_symbol(disambiguator, idx);
|
||||
let disambiguator = tcx.sess.local_crate_disambiguator();
|
||||
return tcx.sess.generate_derive_registrar_symbol(disambiguator, idx);
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME(eddyb) Precompute a custom symbol name based on attributes.
|
||||
let attrs = scx.tcx().get_attrs(def_id);
|
||||
let attrs = tcx.get_attrs(def_id);
|
||||
let is_foreign = if let Some(id) = node_id {
|
||||
match scx.tcx().hir.get(id) {
|
||||
match tcx.hir.get(id) {
|
||||
hir_map::NodeForeignItem(_) => true,
|
||||
_ => false
|
||||
}
|
||||
} else {
|
||||
scx.sess().cstore.is_foreign_item(def_id)
|
||||
tcx.sess.cstore.is_foreign_item(def_id)
|
||||
};
|
||||
|
||||
if let Some(name) = weak_lang_items::link_name(&attrs) {
|
||||
@ -210,17 +207,17 @@ pub fn symbol_name<'a, 'tcx>(instance: Instance<'tcx>,
|
||||
return name.to_string();
|
||||
}
|
||||
// Don't mangle foreign items.
|
||||
return scx.tcx().item_name(def_id).as_str().to_string();
|
||||
return tcx.item_name(def_id).as_str().to_string();
|
||||
}
|
||||
|
||||
if let Some(name) = attr::find_export_name_attr(scx.sess().diagnostic(), &attrs) {
|
||||
if let Some(name) = attr::find_export_name_attr(tcx.sess.diagnostic(), &attrs) {
|
||||
// Use provided name
|
||||
return name.to_string();
|
||||
}
|
||||
|
||||
if attr::contains_name(&attrs, "no_mangle") {
|
||||
// Don't mangle
|
||||
return scx.tcx().item_name(def_id).as_str().to_string();
|
||||
return tcx.item_name(def_id).as_str().to_string();
|
||||
}
|
||||
|
||||
// We want to compute the "type" of this item. Unfortunately, some
|
||||
@ -230,11 +227,11 @@ pub fn symbol_name<'a, 'tcx>(instance: Instance<'tcx>,
|
||||
let mut ty_def_id = def_id;
|
||||
let instance_ty;
|
||||
loop {
|
||||
let key = scx.tcx().def_key(ty_def_id);
|
||||
let key = tcx.def_key(ty_def_id);
|
||||
match key.disambiguated_data.data {
|
||||
DefPathData::TypeNs(_) |
|
||||
DefPathData::ValueNs(_) => {
|
||||
instance_ty = scx.tcx().item_type(ty_def_id);
|
||||
instance_ty = tcx.item_type(ty_def_id);
|
||||
break;
|
||||
}
|
||||
_ => {
|
||||
@ -251,16 +248,16 @@ pub fn symbol_name<'a, 'tcx>(instance: Instance<'tcx>,
|
||||
|
||||
// Erase regions because they may not be deterministic when hashed
|
||||
// and should not matter anyhow.
|
||||
let instance_ty = scx.tcx().erase_regions(&instance_ty);
|
||||
let instance_ty = tcx.erase_regions(&instance_ty);
|
||||
|
||||
let hash = get_symbol_hash(scx, Some(def_id), instance_ty, Some(substs));
|
||||
let hash = get_symbol_hash(tcx, Some(def_id), instance_ty, Some(substs));
|
||||
|
||||
let mut buffer = SymbolPathBuffer {
|
||||
names: Vec::new()
|
||||
};
|
||||
|
||||
item_path::with_forced_absolute_paths(|| {
|
||||
scx.tcx().push_item_path(&mut buffer, def_id);
|
||||
tcx.push_item_path(&mut buffer, def_id);
|
||||
});
|
||||
|
||||
mangle(buffer.names.into_iter(), &hash)
|
||||
@ -281,11 +278,11 @@ impl ItemPathBuffer for SymbolPathBuffer {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
pub fn exported_name_from_type_and_prefix<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
t: Ty<'tcx>,
|
||||
prefix: &str)
|
||||
-> String {
|
||||
let hash = get_symbol_hash(scx, None, t, None);
|
||||
let hash = get_symbol_hash(tcx, None, t, None);
|
||||
let path = [Symbol::intern(prefix).as_str()];
|
||||
mangle(path.iter().cloned(), &hash)
|
||||
}
|
||||
|
@ -65,6 +65,7 @@ use meth;
|
||||
use mir;
|
||||
use monomorphize::{self, Instance};
|
||||
use partitioning::{self, PartitioningStrategy, CodegenUnit};
|
||||
use symbol_cache::SymbolCache;
|
||||
use symbol_map::SymbolMap;
|
||||
use symbol_names_test;
|
||||
use trans_item::{TransItem, DefPathBasedNames};
|
||||
@ -75,7 +76,6 @@ use util::nodemap::{NodeSet, FxHashMap, FxHashSet};
|
||||
|
||||
use libc::c_uint;
|
||||
use std::ffi::{CStr, CString};
|
||||
use std::rc::Rc;
|
||||
use std::str;
|
||||
use std::i32;
|
||||
use syntax_pos::Span;
|
||||
@ -802,6 +802,7 @@ fn write_metadata<'a, 'gcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>,
|
||||
/// in any other compilation unit. Give these symbols internal linkage.
|
||||
fn internalize_symbols<'a, 'tcx>(sess: &Session,
|
||||
scx: &SharedCrateContext<'a, 'tcx>,
|
||||
translation_items: &FxHashSet<TransItem<'tcx>>,
|
||||
llvm_modules: &[ModuleLlvm],
|
||||
symbol_map: &SymbolMap<'tcx>,
|
||||
exported_symbols: &ExportedSymbols) {
|
||||
@ -854,7 +855,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session,
|
||||
let mut locally_defined_symbols = FxHashSet();
|
||||
let mut linkage_fixed_explicitly = FxHashSet();
|
||||
|
||||
for trans_item in scx.translation_items().borrow().iter() {
|
||||
for trans_item in translation_items {
|
||||
let symbol_name = symbol_map.get_or_compute(scx, *trans_item);
|
||||
if trans_item.explicit_linkage(tcx).is_some() {
|
||||
linkage_fixed_explicitly.insert(symbol_name.clone());
|
||||
@ -1011,8 +1012,8 @@ fn iter_functions(llmod: llvm::ModuleRef) -> ValueIter {
|
||||
///
|
||||
/// This list is later used by linkers to determine the set of symbols needed to
|
||||
/// be exposed from a dynamic library and it's also encoded into the metadata.
|
||||
pub fn find_exported_symbols(tcx: TyCtxt, reachable: NodeSet) -> NodeSet {
|
||||
reachable.into_iter().filter(|&id| {
|
||||
pub fn find_exported_symbols(tcx: TyCtxt, reachable: &NodeSet) -> NodeSet {
|
||||
reachable.iter().cloned().filter(|&id| {
|
||||
// Next, we want to ignore some FFI functions that are not exposed from
|
||||
// this crate. Reachable FFI functions can be lumped into two
|
||||
// categories:
|
||||
@ -1064,7 +1065,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
let krate = tcx.hir.krate();
|
||||
|
||||
let ty::CrateAnalysis { reachable, .. } = analysis;
|
||||
let exported_symbols = find_exported_symbols(tcx, reachable);
|
||||
let exported_symbols = find_exported_symbols(tcx, &reachable);
|
||||
|
||||
let check_overflow = tcx.sess.overflow_checks();
|
||||
|
||||
@ -1109,9 +1110,8 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
||||
// Run the translation item collector and partition the collected items into
|
||||
// codegen units.
|
||||
let (codegen_units, symbol_map) = collect_and_partition_translation_items(&shared_ccx);
|
||||
|
||||
let symbol_map = Rc::new(symbol_map);
|
||||
let (translation_items, codegen_units, symbol_map) =
|
||||
collect_and_partition_translation_items(&shared_ccx);
|
||||
|
||||
let mut all_stats = Stats::default();
|
||||
let modules: Vec<ModuleTranslation> = codegen_units
|
||||
@ -1121,7 +1121,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
let (stats, module) =
|
||||
tcx.dep_graph.with_task(dep_node,
|
||||
AssertDepGraphSafe(&shared_ccx),
|
||||
AssertDepGraphSafe((cgu, symbol_map.clone())),
|
||||
AssertDepGraphSafe(cgu),
|
||||
module_translation);
|
||||
all_stats.extend(stats);
|
||||
module
|
||||
@ -1130,16 +1130,17 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
||||
fn module_translation<'a, 'tcx>(
|
||||
scx: AssertDepGraphSafe<&SharedCrateContext<'a, 'tcx>>,
|
||||
args: AssertDepGraphSafe<(CodegenUnit<'tcx>, Rc<SymbolMap<'tcx>>)>)
|
||||
args: AssertDepGraphSafe<CodegenUnit<'tcx>>)
|
||||
-> (Stats, ModuleTranslation)
|
||||
{
|
||||
// FIXME(#40304): We ought to be using the id as a key and some queries, I think.
|
||||
let AssertDepGraphSafe(scx) = scx;
|
||||
let AssertDepGraphSafe((cgu, symbol_map)) = args;
|
||||
let AssertDepGraphSafe(cgu) = args;
|
||||
|
||||
let cgu_name = String::from(cgu.name());
|
||||
let cgu_id = cgu.work_product_id();
|
||||
let symbol_name_hash = cgu.compute_symbol_name_hash(scx, &symbol_map);
|
||||
let symbol_cache = SymbolCache::new(scx.tcx());
|
||||
let symbol_name_hash = cgu.compute_symbol_name_hash(scx, &symbol_cache);
|
||||
|
||||
// Check whether there is a previous work-product we can
|
||||
// re-use. Not only must the file exist, and the inputs not
|
||||
@ -1174,11 +1175,11 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
}
|
||||
|
||||
// Instantiate translation items without filling out definitions yet...
|
||||
let lcx = LocalCrateContext::new(scx, cgu, symbol_map.clone());
|
||||
let lcx = LocalCrateContext::new(scx, cgu, &symbol_cache);
|
||||
let module = {
|
||||
let ccx = CrateContext::new(scx, &lcx);
|
||||
let trans_items = ccx.codegen_unit()
|
||||
.items_in_deterministic_order(ccx.tcx(), &symbol_map);
|
||||
.items_in_deterministic_order(ccx.tcx(), &symbol_cache);
|
||||
for &(trans_item, linkage) in &trans_items {
|
||||
trans_item.predefine(&ccx, linkage);
|
||||
}
|
||||
@ -1238,7 +1239,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
|
||||
assert_module_sources::assert_module_sources(tcx, &modules);
|
||||
|
||||
symbol_names_test::report_symbol_names(&shared_ccx);
|
||||
symbol_names_test::report_symbol_names(tcx);
|
||||
|
||||
if shared_ccx.sess().trans_stats() {
|
||||
println!("--- trans stats ---");
|
||||
@ -1289,6 +1290,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
time(shared_ccx.sess().time_passes(), "internalize symbols", || {
|
||||
internalize_symbols(sess,
|
||||
&shared_ccx,
|
||||
&translation_items,
|
||||
&llvm_modules,
|
||||
&symbol_map,
|
||||
&exported_symbols);
|
||||
@ -1517,7 +1519,9 @@ fn gather_type_sizes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
|
||||
}
|
||||
|
||||
fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>)
|
||||
-> (Vec<CodegenUnit<'tcx>>, SymbolMap<'tcx>) {
|
||||
-> (FxHashSet<TransItem<'tcx>>,
|
||||
Vec<CodegenUnit<'tcx>>,
|
||||
SymbolMap<'tcx>) {
|
||||
let time_passes = scx.sess().time_passes();
|
||||
|
||||
let collection_mode = match scx.sess().opts.debugging_opts.print_trans_items {
|
||||
@ -1563,13 +1567,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
|
||||
assert!(scx.tcx().sess.opts.cg.codegen_units == codegen_units.len() ||
|
||||
scx.tcx().sess.opts.debugging_opts.incremental.is_some());
|
||||
|
||||
{
|
||||
let mut ccx_map = scx.translation_items().borrow_mut();
|
||||
|
||||
for trans_item in items.iter().cloned() {
|
||||
ccx_map.insert(trans_item);
|
||||
}
|
||||
}
|
||||
let translation_items: FxHashSet<TransItem<'tcx>> = items.iter().cloned().collect();
|
||||
|
||||
if scx.sess().opts.debugging_opts.print_trans_items.is_some() {
|
||||
let mut item_to_cgus = FxHashMap();
|
||||
@ -1624,5 +1622,5 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a
|
||||
}
|
||||
}
|
||||
|
||||
(codegen_units, symbol_map)
|
||||
(translation_items, codegen_units, symbol_map)
|
||||
}
|
||||
|
@ -14,18 +14,18 @@
|
||||
//! and methods are represented as just a fn ptr and not a full
|
||||
//! closure.
|
||||
|
||||
use llvm::{self, ValueRef};
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::ty::subst::Substs;
|
||||
use attributes;
|
||||
use common::{self, CrateContext};
|
||||
use monomorphize;
|
||||
use consts;
|
||||
use declare;
|
||||
use monomorphize::Instance;
|
||||
use llvm::{self, ValueRef};
|
||||
use monomorphize::{self, Instance};
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::ty::{self, TypeFoldable};
|
||||
use rustc::ty::subst::Substs;
|
||||
use syntax_pos::DUMMY_SP;
|
||||
use trans_item::TransItem;
|
||||
use type_of;
|
||||
use rustc::ty::TypeFoldable;
|
||||
|
||||
/// Translates a reference to a fn/method item, monomorphizing and
|
||||
/// inlining as it goes.
|
||||
@ -51,8 +51,7 @@ pub fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
return llfn;
|
||||
}
|
||||
|
||||
let sym = ccx.symbol_map().get_or_compute(ccx.shared(),
|
||||
TransItem::Fn(instance));
|
||||
let sym = ccx.symbol_cache().get(TransItem::Fn(instance));
|
||||
debug!("get_fn({:?}: {:?}) => {}", instance, fn_ty, sym);
|
||||
|
||||
// This is subtle and surprising, but sometimes we have to bitcast
|
||||
@ -102,15 +101,17 @@ pub fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
|
||||
let attrs = instance.def.attrs(ccx.tcx());
|
||||
attributes::from_fn_attrs(ccx, &attrs, llfn);
|
||||
|
||||
let is_local_def = ccx.shared().translation_items().borrow()
|
||||
.contains(&TransItem::Fn(instance));
|
||||
if is_local_def {
|
||||
// FIXME(eddyb) Doubt all extern fn should allow unwinding.
|
||||
// Perhaps questionable, but we assume that anything defined
|
||||
// *in Rust code* may unwind. Foreign items like `extern "C" {
|
||||
// fn foo(); }` are assumed not to unwind **unless** they have
|
||||
// a `#[unwind]` attribute.
|
||||
if !ty::queries::is_foreign_item::get(tcx, DUMMY_SP, instance.def_id()) {
|
||||
attributes::unwind(llfn, true);
|
||||
unsafe {
|
||||
llvm::LLVMRustSetLinkage(llfn, llvm::Linkage::ExternalLinkage);
|
||||
}
|
||||
}
|
||||
|
||||
if ccx.use_dll_storage_attrs() &&
|
||||
ccx.sess().cstore.is_dllimport_foreign_item(instance.def_id())
|
||||
{
|
||||
|
@ -467,12 +467,10 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
// have to instantiate all methods of the trait being cast to, so we
|
||||
// can build the appropriate vtable.
|
||||
mir::Rvalue::Cast(mir::CastKind::Unsize, ref operand, target_ty) => {
|
||||
let target_ty = monomorphize::apply_param_substs(self.scx,
|
||||
self.param_substs,
|
||||
let target_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
|
||||
&target_ty);
|
||||
let source_ty = operand.ty(self.mir, self.scx.tcx());
|
||||
let source_ty = monomorphize::apply_param_substs(self.scx,
|
||||
self.param_substs,
|
||||
let source_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
|
||||
&source_ty);
|
||||
let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.scx,
|
||||
source_ty,
|
||||
@ -489,9 +487,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
}
|
||||
mir::Rvalue::Cast(mir::CastKind::ReifyFnPointer, ref operand, _) => {
|
||||
let fn_ty = operand.ty(self.mir, self.scx.tcx());
|
||||
let fn_ty = monomorphize::apply_param_substs(
|
||||
self.scx,
|
||||
self.param_substs,
|
||||
let fn_ty = self.scx.tcx().trans_apply_param_substs(self.param_substs,
|
||||
&fn_ty);
|
||||
visit_fn_use(self.scx, fn_ty, false, &mut self.output);
|
||||
}
|
||||
@ -534,8 +530,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
}
|
||||
|
||||
if let mir::Literal::Item { def_id, substs } = constant.literal {
|
||||
let substs = monomorphize::apply_param_substs(self.scx,
|
||||
self.param_substs,
|
||||
let substs = self.scx.tcx().trans_apply_param_substs(self.param_substs,
|
||||
&substs);
|
||||
let instance = monomorphize::resolve(self.scx, def_id, substs);
|
||||
collect_neighbours(self.scx, instance, self.output);
|
||||
@ -552,17 +547,14 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> {
|
||||
match *kind {
|
||||
mir::TerminatorKind::Call { ref func, .. } => {
|
||||
let callee_ty = func.ty(self.mir, tcx);
|
||||
let callee_ty = monomorphize::apply_param_substs(
|
||||
self.scx, self.param_substs, &callee_ty);
|
||||
let callee_ty = tcx.trans_apply_param_substs(self.param_substs, &callee_ty);
|
||||
visit_fn_use(self.scx, callee_ty, true, &mut self.output);
|
||||
}
|
||||
mir::TerminatorKind::Drop { ref location, .. } |
|
||||
mir::TerminatorKind::DropAndReplace { ref location, .. } => {
|
||||
let ty = location.ty(self.mir, self.scx.tcx())
|
||||
.to_ty(self.scx.tcx());
|
||||
let ty = monomorphize::apply_param_substs(self.scx,
|
||||
self.param_substs,
|
||||
&ty);
|
||||
let ty = tcx.trans_apply_param_substs(self.param_substs, &ty);
|
||||
visit_drop_use(self.scx, ty, true, self.output);
|
||||
}
|
||||
mir::TerminatorKind::Goto { .. } |
|
||||
|
@ -564,7 +564,7 @@ pub fn def_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>,
|
||||
-> Ty<'tcx>
|
||||
{
|
||||
let ty = shared.tcx().item_type(def_id);
|
||||
monomorphize::apply_param_substs(shared, substs, &ty)
|
||||
shared.tcx().trans_apply_param_substs(substs, &ty)
|
||||
}
|
||||
|
||||
/// Return the substituted type of an instance.
|
||||
@ -573,5 +573,5 @@ pub fn instance_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>,
|
||||
-> Ty<'tcx>
|
||||
{
|
||||
let ty = instance.def.def_ty(shared.tcx());
|
||||
monomorphize::apply_param_substs(shared, instance.substs, &ty)
|
||||
shared.tcx().trans_apply_param_substs(instance.substs, &ty)
|
||||
}
|
||||
|
@ -93,20 +93,19 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
|
||||
hir_map::NodeItem(&hir::Item {
|
||||
ref attrs, span, node: hir::ItemStatic(..), ..
|
||||
}) => {
|
||||
let sym = ccx.symbol_map()
|
||||
.get(TransItem::Static(id))
|
||||
.expect("Local statics should always be in the SymbolMap");
|
||||
let sym = ccx.symbol_cache()
|
||||
.get(TransItem::Static(id));
|
||||
|
||||
let defined_in_current_codegen_unit = ccx.codegen_unit()
|
||||
.items()
|
||||
.contains_key(&TransItem::Static(id));
|
||||
assert!(!defined_in_current_codegen_unit);
|
||||
|
||||
if declare::get_declared_value(ccx, sym).is_some() {
|
||||
if declare::get_declared_value(ccx, &sym[..]).is_some() {
|
||||
span_bug!(span, "trans: Conflicting symbol names for static?");
|
||||
}
|
||||
|
||||
let g = declare::define_global(ccx, sym, llty).unwrap();
|
||||
let g = declare::define_global(ccx, &sym[..], llty).unwrap();
|
||||
|
||||
(g, attrs)
|
||||
}
|
||||
@ -114,7 +113,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
|
||||
hir_map::NodeForeignItem(&hir::ForeignItem {
|
||||
ref attrs, span, node: hir::ForeignItemStatic(..), ..
|
||||
}) => {
|
||||
let sym = symbol_names::symbol_name(instance, ccx.shared());
|
||||
let sym = symbol_names::symbol_name(instance, ccx.tcx());
|
||||
let g = if let Some(name) =
|
||||
attr::first_attr_value_str_by_name(&attrs, "linkage") {
|
||||
// If this is a static with a linkage specified, then we need to handle
|
||||
@ -174,7 +173,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef {
|
||||
|
||||
g
|
||||
} else {
|
||||
let sym = symbol_names::symbol_name(instance, ccx.shared());
|
||||
let sym = symbol_names::symbol_name(instance, ccx.tcx());
|
||||
|
||||
// FIXME(nagisa): perhaps the map of externs could be offloaded to llvm somehow?
|
||||
// FIXME(nagisa): investigate whether it can be changed into define_global
|
||||
|
@ -10,7 +10,7 @@
|
||||
|
||||
use llvm;
|
||||
use llvm::{ContextRef, ModuleRef, ValueRef};
|
||||
use rustc::dep_graph::{DepGraph, DepGraphSafe, DepNode, DepTrackingMap, DepTrackingMapConfig};
|
||||
use rustc::dep_graph::{DepGraph, DepGraphSafe};
|
||||
use rustc::hir;
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::traits;
|
||||
@ -21,7 +21,6 @@ use declare;
|
||||
use monomorphize::Instance;
|
||||
|
||||
use partitioning::CodegenUnit;
|
||||
use trans_item::TransItem;
|
||||
use type_::Type;
|
||||
use rustc_data_structures::base_n;
|
||||
use rustc::ty::subst::Substs;
|
||||
@ -30,15 +29,13 @@ use rustc::ty::layout::{LayoutTyper, TyLayout};
|
||||
use session::config::NoDebugInfo;
|
||||
use session::Session;
|
||||
use session::config;
|
||||
use symbol_map::SymbolMap;
|
||||
use util::nodemap::{NodeSet, DefIdMap, FxHashMap, FxHashSet};
|
||||
use symbol_cache::SymbolCache;
|
||||
use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
|
||||
|
||||
use std::ffi::{CStr, CString};
|
||||
use std::cell::{Cell, RefCell};
|
||||
use std::marker::PhantomData;
|
||||
use std::ptr;
|
||||
use std::iter;
|
||||
use std::rc::Rc;
|
||||
use std::str;
|
||||
use syntax::ast;
|
||||
use syntax::symbol::InternedString;
|
||||
@ -86,17 +83,13 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> {
|
||||
check_overflow: bool,
|
||||
|
||||
use_dll_storage_attrs: bool,
|
||||
|
||||
translation_items: RefCell<FxHashSet<TransItem<'tcx>>>,
|
||||
trait_cache: RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>>,
|
||||
project_cache: RefCell<DepTrackingMap<ProjectionCache<'tcx>>>,
|
||||
}
|
||||
|
||||
/// The local portion of a `CrateContext`. There is one `LocalCrateContext`
|
||||
/// per compilation unit. Each one has its own LLVM `ContextRef` so that
|
||||
/// several compilation units may be optimized in parallel. All other LLVM
|
||||
/// data structures in the `LocalCrateContext` are tied to that `ContextRef`.
|
||||
pub struct LocalCrateContext<'tcx> {
|
||||
pub struct LocalCrateContext<'a, 'tcx: 'a> {
|
||||
llmod: ModuleRef,
|
||||
llcx: ContextRef,
|
||||
stats: Stats,
|
||||
@ -168,60 +161,10 @@ pub struct LocalCrateContext<'tcx> {
|
||||
/// Depth of the current type-of computation - used to bail out
|
||||
type_of_depth: Cell<usize>,
|
||||
|
||||
symbol_map: Rc<SymbolMap<'tcx>>,
|
||||
|
||||
/// A counter that is used for generating local symbol names
|
||||
local_gen_sym_counter: Cell<usize>,
|
||||
}
|
||||
|
||||
// Implement DepTrackingMapConfig for `trait_cache`
|
||||
pub struct TraitSelectionCache<'tcx> {
|
||||
data: PhantomData<&'tcx ()>
|
||||
}
|
||||
|
||||
impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> {
|
||||
type Key = ty::PolyTraitRef<'tcx>;
|
||||
type Value = traits::Vtable<'tcx, ()>;
|
||||
fn to_dep_node(key: &ty::PolyTraitRef<'tcx>) -> DepNode<DefId> {
|
||||
key.to_poly_trait_predicate().dep_node()
|
||||
}
|
||||
}
|
||||
|
||||
// # Global Cache
|
||||
|
||||
pub struct ProjectionCache<'gcx> {
|
||||
data: PhantomData<&'gcx ()>
|
||||
}
|
||||
|
||||
impl<'gcx> DepTrackingMapConfig for ProjectionCache<'gcx> {
|
||||
type Key = Ty<'gcx>;
|
||||
type Value = Ty<'gcx>;
|
||||
fn to_dep_node(key: &Self::Key) -> DepNode<DefId> {
|
||||
// Ideally, we'd just put `key` into the dep-node, but we
|
||||
// can't put full types in there. So just collect up all the
|
||||
// def-ids of structs/enums as well as any traits that we
|
||||
// project out of. It doesn't matter so much what we do here,
|
||||
// except that if we are too coarse, we'll create overly
|
||||
// coarse edges between impls and the trans. For example, if
|
||||
// we just used the def-id of things we are projecting out of,
|
||||
// then the key for `<Foo as SomeTrait>::T` and `<Bar as
|
||||
// SomeTrait>::T` would both share a dep-node
|
||||
// (`TraitSelect(SomeTrait)`), and hence the impls for both
|
||||
// `Foo` and `Bar` would be considered inputs. So a change to
|
||||
// `Bar` would affect things that just normalized `Foo`.
|
||||
// Anyway, this heuristic is not ideal, but better than
|
||||
// nothing.
|
||||
let def_ids: Vec<DefId> =
|
||||
key.walk()
|
||||
.filter_map(|t| match t.sty {
|
||||
ty::TyAdt(adt_def, _) => Some(adt_def.did),
|
||||
ty::TyProjection(ref proj) => Some(proj.trait_ref.def_id),
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
DepNode::ProjectionCache { def_ids: def_ids }
|
||||
}
|
||||
symbol_cache: &'a SymbolCache<'a, 'tcx>,
|
||||
}
|
||||
|
||||
/// A CrateContext value binds together one LocalCrateContext with the
|
||||
@ -229,12 +172,12 @@ impl<'gcx> DepTrackingMapConfig for ProjectionCache<'gcx> {
|
||||
/// pass around (SharedCrateContext, LocalCrateContext) tuples all over trans.
|
||||
pub struct CrateContext<'a, 'tcx: 'a> {
|
||||
shared: &'a SharedCrateContext<'a, 'tcx>,
|
||||
local_ccx: &'a LocalCrateContext<'tcx>,
|
||||
local_ccx: &'a LocalCrateContext<'a, 'tcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> CrateContext<'a, 'tcx> {
|
||||
pub fn new(shared: &'a SharedCrateContext<'a, 'tcx>,
|
||||
local_ccx: &'a LocalCrateContext<'tcx>)
|
||||
local_ccx: &'a LocalCrateContext<'a, 'tcx>)
|
||||
-> Self {
|
||||
CrateContext { shared, local_ccx }
|
||||
}
|
||||
@ -385,9 +328,6 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
|
||||
tcx: tcx,
|
||||
check_overflow: check_overflow,
|
||||
use_dll_storage_attrs: use_dll_storage_attrs,
|
||||
translation_items: RefCell::new(FxHashSet()),
|
||||
trait_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
|
||||
project_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())),
|
||||
}
|
||||
}
|
||||
|
||||
@ -407,14 +347,6 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
|
||||
&self.exported_symbols
|
||||
}
|
||||
|
||||
pub fn trait_cache(&self) -> &RefCell<DepTrackingMap<TraitSelectionCache<'tcx>>> {
|
||||
&self.trait_cache
|
||||
}
|
||||
|
||||
pub fn project_cache(&self) -> &RefCell<DepTrackingMap<ProjectionCache<'tcx>>> {
|
||||
&self.project_cache
|
||||
}
|
||||
|
||||
pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
self.tcx
|
||||
}
|
||||
@ -430,17 +362,13 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> {
|
||||
pub fn use_dll_storage_attrs(&self) -> bool {
|
||||
self.use_dll_storage_attrs
|
||||
}
|
||||
|
||||
pub fn translation_items(&self) -> &RefCell<FxHashSet<TransItem<'tcx>>> {
|
||||
&self.translation_items
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> LocalCrateContext<'tcx> {
|
||||
pub fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>,
|
||||
impl<'a, 'tcx> LocalCrateContext<'a, 'tcx> {
|
||||
pub fn new(shared: &SharedCrateContext<'a, 'tcx>,
|
||||
codegen_unit: CodegenUnit<'tcx>,
|
||||
symbol_map: Rc<SymbolMap<'tcx>>)
|
||||
-> LocalCrateContext<'tcx> {
|
||||
symbol_cache: &'a SymbolCache<'a, 'tcx>)
|
||||
-> LocalCrateContext<'a, 'tcx> {
|
||||
unsafe {
|
||||
// Append ".rs" to LLVM module identifier.
|
||||
//
|
||||
@ -494,8 +422,8 @@ impl<'tcx> LocalCrateContext<'tcx> {
|
||||
rust_try_fn: Cell::new(None),
|
||||
intrinsics: RefCell::new(FxHashMap()),
|
||||
type_of_depth: Cell::new(0),
|
||||
symbol_map: symbol_map,
|
||||
local_gen_sym_counter: Cell::new(0),
|
||||
symbol_cache: symbol_cache,
|
||||
};
|
||||
|
||||
let (int_type, opaque_vec_type, str_slice_ty, mut local_ccx) = {
|
||||
@ -529,8 +457,8 @@ impl<'tcx> LocalCrateContext<'tcx> {
|
||||
/// This is used in the `LocalCrateContext` constructor to allow calling
|
||||
/// functions that expect a complete `CrateContext`, even before the local
|
||||
/// portion is fully initialized and attached to the `SharedCrateContext`.
|
||||
fn dummy_ccx<'a>(shared: &'a SharedCrateContext<'a, 'tcx>,
|
||||
local_ccxs: &'a [LocalCrateContext<'tcx>])
|
||||
fn dummy_ccx(shared: &'a SharedCrateContext<'a, 'tcx>,
|
||||
local_ccxs: &'a [LocalCrateContext<'a, 'tcx>])
|
||||
-> CrateContext<'a, 'tcx> {
|
||||
assert!(local_ccxs.len() == 1);
|
||||
CrateContext {
|
||||
@ -549,7 +477,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
|
||||
self.shared
|
||||
}
|
||||
|
||||
fn local(&self) -> &'b LocalCrateContext<'tcx> {
|
||||
fn local(&self) -> &'b LocalCrateContext<'b, 'tcx> {
|
||||
self.local_ccx
|
||||
}
|
||||
|
||||
@ -716,12 +644,8 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
|
||||
self.shared.use_dll_storage_attrs()
|
||||
}
|
||||
|
||||
pub fn symbol_map(&self) -> &SymbolMap<'tcx> {
|
||||
&*self.local().symbol_map
|
||||
}
|
||||
|
||||
pub fn translation_items(&self) -> &RefCell<FxHashSet<TransItem<'tcx>>> {
|
||||
&self.shared.translation_items
|
||||
pub fn symbol_cache(&self) -> &'b SymbolCache<'b, 'tcx> {
|
||||
self.local().symbol_cache
|
||||
}
|
||||
|
||||
/// Given the def-id of some item that has no type parameters, make
|
||||
@ -867,7 +791,7 @@ impl<'a, 'tcx> LayoutTyper<'tcx> for &'a CrateContext<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TypeOfDepthLock<'a, 'tcx: 'a>(&'a LocalCrateContext<'tcx>);
|
||||
pub struct TypeOfDepthLock<'a, 'tcx: 'a>(&'a LocalCrateContext<'a, 'tcx>);
|
||||
|
||||
impl<'a, 'tcx> Drop for TypeOfDepthLock<'a, 'tcx> {
|
||||
fn drop(&mut self) {
|
||||
|
@ -124,6 +124,7 @@ mod meth;
|
||||
mod mir;
|
||||
mod monomorphize;
|
||||
mod partitioning;
|
||||
mod symbol_cache;
|
||||
mod symbol_map;
|
||||
mod symbol_names_test;
|
||||
mod trans_item;
|
||||
|
@ -260,9 +260,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> {
|
||||
fn monomorphize<T>(&self, value: &T) -> T
|
||||
where T: TransNormalize<'tcx>
|
||||
{
|
||||
monomorphize::apply_param_substs(self.ccx.shared(),
|
||||
self.substs,
|
||||
value)
|
||||
self.ccx.tcx().trans_apply_param_substs(self.substs, value)
|
||||
}
|
||||
|
||||
fn trans(&mut self) -> Result<Const<'tcx>, ConstEvalErr<'tcx>> {
|
||||
|
@ -22,7 +22,7 @@ use base;
|
||||
use builder::Builder;
|
||||
use common::{self, CrateContext, Funclet};
|
||||
use debuginfo::{self, declare_local, VariableAccess, VariableKind, FunctionDebugContext};
|
||||
use monomorphize::{self, Instance};
|
||||
use monomorphize::Instance;
|
||||
use abi::FnType;
|
||||
use type_of;
|
||||
|
||||
@ -102,8 +102,9 @@ pub struct MirContext<'a, 'tcx:'a> {
|
||||
|
||||
impl<'a, 'tcx> MirContext<'a, 'tcx> {
|
||||
pub fn monomorphize<T>(&self, value: &T) -> T
|
||||
where T: TransNormalize<'tcx> {
|
||||
monomorphize::apply_param_substs(self.ccx.shared(), self.param_substs, value)
|
||||
where T: TransNormalize<'tcx>
|
||||
{
|
||||
self.ccx.tcx().trans_apply_param_substs(self.param_substs, value)
|
||||
}
|
||||
|
||||
pub fn set_debug_loc(&mut self, bcx: &Builder, source_info: mir::SourceInfo) {
|
||||
|
@ -13,17 +13,13 @@ use common::*;
|
||||
use glue;
|
||||
|
||||
use rustc::hir::def_id::DefId;
|
||||
use rustc::infer::TransNormalize;
|
||||
use rustc::middle::lang_items::DropInPlaceFnLangItem;
|
||||
use rustc::traits::{self, SelectionContext, Reveal};
|
||||
use rustc::traits;
|
||||
use rustc::ty::adjustment::CustomCoerceUnsized;
|
||||
use rustc::ty::fold::{TypeFolder, TypeFoldable};
|
||||
use rustc::ty::subst::{Kind, Subst, Substs};
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use rustc::util::common::MemoizationMap;
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::codemap::{Span, DUMMY_SP};
|
||||
use syntax::codemap::DUMMY_SP;
|
||||
|
||||
pub use rustc::ty::Instance;
|
||||
|
||||
@ -104,73 +100,6 @@ pub fn resolve_closure<'a, 'tcx> (
|
||||
}
|
||||
}
|
||||
|
||||
/// Attempts to resolve an obligation. The result is a shallow vtable resolution -- meaning that we
|
||||
/// do not (necessarily) resolve all nested obligations on the impl. Note that type check should
|
||||
/// guarantee to us that all nested obligations *could be* resolved if we wanted to.
|
||||
fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
span: Span,
|
||||
trait_ref: ty::PolyTraitRef<'tcx>)
|
||||
-> traits::Vtable<'tcx, ()>
|
||||
{
|
||||
let tcx = scx.tcx();
|
||||
|
||||
// Remove any references to regions; this helps improve caching.
|
||||
let trait_ref = tcx.erase_regions(&trait_ref);
|
||||
|
||||
scx.trait_cache().memoize(trait_ref, || {
|
||||
debug!("trans::fulfill_obligation(trait_ref={:?}, def_id={:?})",
|
||||
trait_ref, trait_ref.def_id());
|
||||
|
||||
// Do the initial selection for the obligation. This yields the
|
||||
// shallow result we are looking for -- that is, what specific impl.
|
||||
tcx.infer_ctxt((), Reveal::All).enter(|infcx| {
|
||||
let mut selcx = SelectionContext::new(&infcx);
|
||||
|
||||
let obligation_cause = traits::ObligationCause::misc(span,
|
||||
ast::DUMMY_NODE_ID);
|
||||
let obligation = traits::Obligation::new(obligation_cause,
|
||||
trait_ref.to_poly_trait_predicate());
|
||||
|
||||
let selection = match selcx.select(&obligation) {
|
||||
Ok(Some(selection)) => selection,
|
||||
Ok(None) => {
|
||||
// Ambiguity can happen when monomorphizing during trans
|
||||
// expands to some humongo type that never occurred
|
||||
// statically -- this humongo type can then overflow,
|
||||
// leading to an ambiguous result. So report this as an
|
||||
// overflow bug, since I believe this is the only case
|
||||
// where ambiguity can result.
|
||||
debug!("Encountered ambiguity selecting `{:?}` during trans, \
|
||||
presuming due to overflow",
|
||||
trait_ref);
|
||||
tcx.sess.span_fatal(span,
|
||||
"reached the recursion limit during monomorphization \
|
||||
(selection ambiguity)");
|
||||
}
|
||||
Err(e) => {
|
||||
span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans",
|
||||
e, trait_ref)
|
||||
}
|
||||
};
|
||||
|
||||
debug!("fulfill_obligation: selection={:?}", selection);
|
||||
|
||||
// Currently, we use a fulfillment context to completely resolve
|
||||
// all nested obligations. This is because they can inform the
|
||||
// inference of the impl's type parameters.
|
||||
let mut fulfill_cx = traits::FulfillmentContext::new();
|
||||
let vtable = selection.map(|predicate| {
|
||||
debug!("fulfill_obligation: register_predicate_obligation {:?}", predicate);
|
||||
fulfill_cx.register_predicate_obligation(&infcx, predicate);
|
||||
});
|
||||
let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable);
|
||||
|
||||
info!("Cache miss: {:?} => {:?}", trait_ref, vtable);
|
||||
vtable
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn resolve_associated_item<'a, 'tcx>(
|
||||
scx: &SharedCrateContext<'a, 'tcx>,
|
||||
trait_item: &ty::AssociatedItem,
|
||||
@ -185,7 +114,7 @@ fn resolve_associated_item<'a, 'tcx>(
|
||||
def_id, trait_id, rcvr_substs);
|
||||
|
||||
let trait_ref = ty::TraitRef::from_method(tcx, trait_id, rcvr_substs);
|
||||
let vtbl = fulfill_obligation(scx, DUMMY_SP, ty::Binder(trait_ref));
|
||||
let vtbl = tcx.trans_fulfill_obligation(DUMMY_SP, ty::Binder(trait_ref));
|
||||
|
||||
// Now that we know which impl is being used, we can dispatch to
|
||||
// the actual function:
|
||||
@ -285,7 +214,7 @@ pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx
|
||||
substs: scx.tcx().mk_substs_trait(source_ty, &[target_ty])
|
||||
});
|
||||
|
||||
match fulfill_obligation(scx, DUMMY_SP, trait_ref) {
|
||||
match scx.tcx().trans_fulfill_obligation(DUMMY_SP, trait_ref) {
|
||||
traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => {
|
||||
scx.tcx().coerce_unsized_info(impl_def_id).custom_kind.unwrap()
|
||||
}
|
||||
@ -295,21 +224,6 @@ pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx
|
||||
}
|
||||
}
|
||||
|
||||
/// Monomorphizes a type from the AST by first applying the in-scope
|
||||
/// substitutions and then normalizing any associated types.
|
||||
pub fn apply_param_substs<'a, 'tcx, T>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
param_substs: &Substs<'tcx>,
|
||||
value: &T)
|
||||
-> T
|
||||
where T: TransNormalize<'tcx>
|
||||
{
|
||||
let tcx = scx.tcx();
|
||||
debug!("apply_param_substs(param_substs={:?}, value={:?})", param_substs, value);
|
||||
let substituted = value.subst(tcx, param_substs);
|
||||
let substituted = scx.tcx().erase_regions(&substituted);
|
||||
AssociatedTypeNormalizer::new(scx).fold(&substituted)
|
||||
}
|
||||
|
||||
/// Returns the normalized type of a struct field
|
||||
pub fn field_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
param_substs: &Substs<'tcx>,
|
||||
@ -319,39 +233,3 @@ pub fn field_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
tcx.normalize_associated_type(&f.ty(tcx, param_substs))
|
||||
}
|
||||
|
||||
struct AssociatedTypeNormalizer<'a, 'b: 'a, 'gcx: 'b> {
|
||||
shared: &'a SharedCrateContext<'b, 'gcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'gcx> AssociatedTypeNormalizer<'a, 'b, 'gcx> {
|
||||
fn new(shared: &'a SharedCrateContext<'b, 'gcx>) -> Self {
|
||||
AssociatedTypeNormalizer {
|
||||
shared: shared,
|
||||
}
|
||||
}
|
||||
|
||||
fn fold<T:TypeFoldable<'gcx>>(&mut self, value: &T) -> T {
|
||||
if !value.has_projection_types() {
|
||||
value.clone()
|
||||
} else {
|
||||
value.fold_with(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'gcx> TypeFolder<'gcx, 'gcx> for AssociatedTypeNormalizer<'a, 'b, 'gcx> {
|
||||
fn tcx<'c>(&'c self) -> TyCtxt<'c, 'gcx, 'gcx> {
|
||||
self.shared.tcx()
|
||||
}
|
||||
|
||||
fn fold_ty(&mut self, ty: Ty<'gcx>) -> Ty<'gcx> {
|
||||
if !ty.has_projection_types() {
|
||||
ty
|
||||
} else {
|
||||
self.shared.project_cache().memoize(ty, || {
|
||||
debug!("AssociatedTypeNormalizer: ty={:?}", ty);
|
||||
self.shared.tcx().normalize_associated_type(&ty)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -116,7 +116,7 @@ use rustc_incremental::IchHasher;
use std::cmp::Ordering;
use std::hash::Hash;
use std::sync::Arc;
use symbol_map::SymbolMap;
use symbol_cache::SymbolCache;
use syntax::ast::NodeId;
use syntax::symbol::{Symbol, InternedString};
use trans_item::{TransItem, InstantiationMode};
@ -174,14 +174,15 @@ impl<'tcx> CodegenUnit<'tcx> {
DepNode::WorkProduct(self.work_product_id())
}

pub fn compute_symbol_name_hash(&self,
scx: &SharedCrateContext,
symbol_map: &SymbolMap) -> u64 {
pub fn compute_symbol_name_hash<'a>(&self,
scx: &SharedCrateContext<'a, 'tcx>,
symbol_cache: &SymbolCache<'a, 'tcx>)
-> u64 {
let mut state = IchHasher::new();
let exported_symbols = scx.exported_symbols();
let all_items = self.items_in_deterministic_order(scx.tcx(), symbol_map);
let all_items = self.items_in_deterministic_order(scx.tcx(), symbol_cache);
for (item, _) in all_items {
let symbol_name = symbol_map.get(item).unwrap();
let symbol_name = symbol_cache.get(item);
symbol_name.len().hash(&mut state);
symbol_name.hash(&mut state);
let exported = match item {
@ -201,9 +202,9 @@ impl<'tcx> CodegenUnit<'tcx> {
state.finish().to_smaller_hash()
}

pub fn items_in_deterministic_order(&self,
pub fn items_in_deterministic_order<'a>(&self,
tcx: TyCtxt,
symbol_map: &SymbolMap)
symbol_cache: &SymbolCache<'a, 'tcx>)
-> Vec<(TransItem<'tcx>, llvm::Linkage)> {
let mut items: Vec<(TransItem<'tcx>, llvm::Linkage)> =
self.items.iter().map(|(item, linkage)| (*item, *linkage)).collect();
@ -216,9 +217,9 @@ impl<'tcx> CodegenUnit<'tcx> {

match (node_id1, node_id2) {
(None, None) => {
let symbol_name1 = symbol_map.get(trans_item1).unwrap();
let symbol_name2 = symbol_map.get(trans_item2).unwrap();
symbol_name1.cmp(symbol_name2)
let symbol_name1 = symbol_cache.get(trans_item1);
let symbol_name2 = symbol_cache.get(trans_item2);
symbol_name1.cmp(&symbol_name2)
}
// In the following two cases we can avoid looking up the symbol
(None, Some(_)) => Ordering::Less,
@ -230,9 +231,9 @@ impl<'tcx> CodegenUnit<'tcx> {
return ordering;
}

let symbol_name1 = symbol_map.get(trans_item1).unwrap();
let symbol_name2 = symbol_map.get(trans_item2).unwrap();
symbol_name1.cmp(symbol_name2)
let symbol_name1 = symbol_cache.get(trans_item1);
let symbol_name2 = symbol_cache.get(trans_item2);
symbol_name1.cmp(&symbol_name2)
}
}
});
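The comparator in `items_in_deterministic_order` orders items that have no local node id purely by symbol name and places them before items that do; when both items have a node id, the id ordering is used first and the symbol name only breaks ties. A rough standalone analogue of that scheme, with an illustrative `Item` type rather than the compiler's:

    use std::cmp::Ordering;

    // Illustrative item: an optional local id plus a symbol name.
    struct Item {
        node_id: Option<u32>,
        symbol: &'static str,
    }

    // Items without a node id compare by symbol name and sort before items
    // that have one; two items with node ids compare by id first and fall
    // back to the symbol name on a tie.
    fn deterministic_cmp(a: &Item, b: &Item) -> Ordering {
        match (a.node_id, b.node_id) {
            (None, None) => a.symbol.cmp(b.symbol),
            (None, Some(_)) => Ordering::Less,
            (Some(_), None) => Ordering::Greater,
            (Some(x), Some(y)) => x.cmp(&y).then_with(|| a.symbol.cmp(b.symbol)),
        }
    }

    fn main() {
        let mut items = vec![
            Item { node_id: Some(7), symbol: "_ZN3foo3barE" },
            Item { node_id: None, symbol: "_ZN3foo3bazE" },
            Item { node_id: Some(7), symbol: "_ZN3foo3aaaE" },
        ];
        items.sort_by(deterministic_cmp);
        // Items without a node id come first; id ties are broken by name.
        assert!(items[0].node_id.is_none());
    }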
@ -271,14 +272,14 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
let mut initial_partitioning = place_root_translation_items(scx,
trans_items);

debug_dump(scx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());
debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());

// If the partitioning should produce a fixed count of codegen units, merge
// until that count is reached.
if let PartitioningStrategy::FixedUnitCount(count) = strategy {
merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name.as_str());

debug_dump(scx, "POST MERGING:", initial_partitioning.codegen_units.iter());
debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter());
}

// In the next step, we use the inlining map to determine which additional
@ -288,7 +289,7 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
let post_inlining = place_inlined_translation_items(initial_partitioning,
inlining_map);

debug_dump(scx, "POST INLINING:", post_inlining.0.iter());
debug_dump(tcx, "POST INLINING:", post_inlining.0.iter());

// Finally, sort by codegen unit name, so that we get deterministic results
let mut result = post_inlining.0;
@ -528,7 +529,7 @@ fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString
Symbol::intern(&format!("{}{}{}", crate_name, NUMBERED_CODEGEN_UNIT_MARKER, index)).as_str()
}

fn debug_dump<'a, 'b, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
label: &str,
cgus: I)
where I: Iterator<Item=&'b CodegenUnit<'tcx>>,
@ -536,20 +537,18 @@ fn debug_dump<'a, 'b, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>,
{
if cfg!(debug_assertions) {
debug!("{}", label);
let symbol_cache = SymbolCache::new(tcx);
for cgu in cgus {
let symbol_map = SymbolMap::build(scx, cgu.items
.iter()
.map(|(&trans_item, _)| trans_item));
debug!("CodegenUnit {}:", cgu.name);

for (trans_item, linkage) in &cgu.items {
let symbol_name = symbol_map.get_or_compute(scx, *trans_item);
let symbol_name = symbol_cache.get(*trans_item);
let symbol_hash_start = symbol_name.rfind('h');
let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..])
.unwrap_or("<no hash>");

debug!(" - {} [{:?}] [{}]",
trans_item.to_string(scx.tcx()),
trans_item.to_string(tcx),
linkage,
symbol_hash);
}
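The debug output above pulls the trailing hash segment out of a mangled symbol name with `rfind('h')`, falling back to a placeholder when no hash is present. A minimal standalone version of that helper:

    // Everything from the last 'h' onward, or "<no hash>" when absent.
    fn symbol_hash(symbol_name: &str) -> &str {
        symbol_name
            .rfind('h')
            .map(|i| &symbol_name[i..])
            .unwrap_or("<no hash>")
    }

    fn main() {
        assert_eq!(symbol_hash("_ZN4core3fmt5Write9write_fmt17h0123456789abcdefE"),
                   "h0123456789abcdefE");
        assert_eq!(symbol_hash("global_asm_0"), "<no hash>");
    }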
42
src/librustc_trans/symbol_cache.rs
Normal file
@ -0,0 +1,42 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use rustc::ty::TyCtxt;
use std::cell::RefCell;
use syntax_pos::symbol::{InternedString, Symbol};
use trans_item::TransItem;
use util::nodemap::FxHashMap;

// In the SymbolCache we collect the symbol names of translation items
// and cache them for later reference. This is just a performance
// optimization and the cache is populated lazily; symbol names of
// translation items are deterministic and fully defined by the item.
// Thus they can always be recomputed if needed.

pub struct SymbolCache<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
index: RefCell<FxHashMap<TransItem<'tcx>, Symbol>>,
}

impl<'a, 'tcx> SymbolCache<'a, 'tcx> {
pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
SymbolCache {
tcx: tcx,
index: RefCell::new(FxHashMap())
}
}

pub fn get(&self, trans_item: TransItem<'tcx>) -> InternedString {
let mut index = self.index.borrow_mut();
index.entry(trans_item)
.or_insert_with(|| Symbol::intern(&trans_item.compute_symbol_name(self.tcx)))
.as_str()
}
}
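A standalone analogue of this lazily populated cache, using the `HashMap` entry API rather than the closure-based `memoize` sketched earlier; the keys stand in for translation items, and `mangle` stands in for the deterministic but comparatively expensive symbol-name computation:

    use std::cell::RefCell;
    use std::collections::HashMap;

    struct NameCache {
        index: RefCell<HashMap<u64, String>>,
    }

    impl NameCache {
        fn new() -> Self {
            NameCache { index: RefCell::new(HashMap::new()) }
        }

        // Compute the name on first request, then serve it from the map.
        fn get(&self, item: u64) -> String {
            self.index
                .borrow_mut()
                .entry(item)
                .or_insert_with(|| mangle(item))
                .clone()
        }
    }

    // Stand-in for the real symbol mangling; only its determinism matters.
    fn mangle(item: u64) -> String {
        format!("_ZN4item{}E", item)
    }

    fn main() {
        let cache = NameCache::new();
        let first = cache.get(42);   // computed and stored
        let second = cache.get(42);  // served from the cache
        assert_eq!(first, second);
    }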
@ -34,8 +34,9 @@ impl<'tcx> SymbolMap<'tcx> {
where I: Iterator<Item=TransItem<'tcx>>
{
// Check for duplicate symbol names
let tcx = scx.tcx();
let mut symbols: Vec<_> = trans_items.map(|trans_item| {
(trans_item, trans_item.compute_symbol_name(scx))
(trans_item, trans_item.compute_symbol_name(tcx))
}).collect();

(&mut symbols[..]).sort_by(|&(_, ref sym1), &(_, ref sym2)|{
@ -124,7 +125,7 @@ impl<'tcx> SymbolMap<'tcx> {
if let Some(sym) = self.get(trans_item) {
Cow::from(sym)
} else {
Cow::from(trans_item.compute_symbol_name(scx))
Cow::from(trans_item.compute_symbol_name(scx.tcx()))
}
}
}
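The fallback above hands out a borrowed name when the map already holds one and only allocates when it has to recompute. A hedged standalone sketch of that `Cow` pattern (the mangling is a stand-in, not the real `compute_symbol_name`):

    use std::borrow::Cow;
    use std::collections::HashMap;

    // Borrow the cached name if present; otherwise compute an owned one.
    fn get_or_compute<'a>(map: &'a HashMap<u32, String>, item: u32) -> Cow<'a, str> {
        if let Some(sym) = map.get(&item) {
            Cow::from(sym.as_str())
        } else {
            Cow::from(format!("_ZN4item{}E", item)) // stand-in computation
        }
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert(1u32, String::from("_ZN4item1E"));
        assert!(matches!(get_or_compute(&map, 1), Cow::Borrowed(_)));
        assert!(matches!(get_or_compute(&map, 2), Cow::Owned(_)));
    }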
@ -17,43 +17,42 @@
use back::symbol_names;
use rustc::hir;
use rustc::hir::intravisit::{self, Visitor, NestedVisitorMap};
use rustc::ty::TyCtxt;
use syntax::ast;

use common::SharedCrateContext;
use monomorphize::Instance;

const SYMBOL_NAME: &'static str = "rustc_symbol_name";
const ITEM_PATH: &'static str = "rustc_item_path";

pub fn report_symbol_names(scx: &SharedCrateContext) {
pub fn report_symbol_names<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
// if the `rustc_attrs` feature is not enabled, then the
// attributes we are interested in cannot be present anyway, so
// skip the walk.
let tcx = scx.tcx();
if !tcx.sess.features.borrow().rustc_attrs {
return;
}

let _ignore = tcx.dep_graph.in_ignore();
let mut visitor = SymbolNamesTest { scx: scx };
let mut visitor = SymbolNamesTest { tcx: tcx };
// FIXME(#37712) could use ItemLikeVisitor if trait items were item-like
tcx.hir.krate().visit_all_item_likes(&mut visitor.as_deep_visitor());
}

struct SymbolNamesTest<'a, 'tcx:'a> {
scx: &'a SharedCrateContext<'a, 'tcx>,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
}

impl<'a, 'tcx> SymbolNamesTest<'a, 'tcx> {
fn process_attrs(&mut self,
node_id: ast::NodeId) {
let tcx = self.scx.tcx();
let tcx = self.tcx;
let def_id = tcx.hir.local_def_id(node_id);
for attr in tcx.get_attrs(def_id).iter() {
if attr.check_name(SYMBOL_NAME) {
// for now, can only use on monomorphic names
let instance = Instance::mono(tcx, def_id);
let name = symbol_names::symbol_name(instance, self.scx);
let name = symbol_names::symbol_name(instance, self.tcx);
tcx.sess.span_err(attr.span, &format!("symbol-name({})", name));
} else if attr.check_name(ITEM_PATH) {
let path = tcx.item_path_str(def_id);
@ -18,7 +18,7 @@ use asm;
use attributes;
use base;
use consts;
use context::{CrateContext, SharedCrateContext};
use context::CrateContext;
use common;
use declare;
use llvm;
@ -118,8 +118,7 @@ impl<'a, 'tcx> TransItem<'tcx> {
self.to_raw_string(),
ccx.codegen_unit().name());

let symbol_name = ccx.symbol_map()
.get_or_compute(ccx.shared(), *self);
let symbol_name = ccx.symbol_cache().get(*self);

debug!("symbol {}", &symbol_name);

@ -185,16 +184,15 @@ impl<'a, 'tcx> TransItem<'tcx> {
ccx.instances().borrow_mut().insert(instance, lldecl);
}

pub fn compute_symbol_name(&self,
scx: &SharedCrateContext<'a, 'tcx>) -> String {
pub fn compute_symbol_name(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String {
match *self {
TransItem::Fn(instance) => symbol_names::symbol_name(instance, scx),
TransItem::Fn(instance) => symbol_names::symbol_name(instance, tcx),
TransItem::Static(node_id) => {
let def_id = scx.tcx().hir.local_def_id(node_id);
symbol_names::symbol_name(Instance::mono(scx.tcx(), def_id), scx)
let def_id = tcx.hir.local_def_id(node_id);
symbol_names::symbol_name(Instance::mono(tcx, def_id), tcx)
}
TransItem::GlobalAsm(node_id) => {
let def_id = scx.tcx().hir.local_def_id(node_id);
let def_id = tcx.hir.local_def_id(node_id);
format!("global_asm_{:?}", def_id)
}
}
@ -99,6 +99,7 @@ pub fn provide(providers: &mut Providers) {
trait_def,
adt_def,
impl_trait_ref,
is_foreign_item,
..*providers
};
}
@ -1530,3 +1531,13 @@ fn compute_type_of_foreign_fn_decl<'a, 'tcx>(
let substs = Substs::identity_for_item(tcx, def_id);
tcx.mk_fn_def(def_id, substs, fty)
}

fn is_foreign_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> bool {
match tcx.hir.get_if_local(def_id) {
Some(hir_map::NodeForeignItem(..)) => true,
Some(_) => false,
_ => bug!("is_foreign_item applied to non-local def-id {:?}", def_id)
}
}
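The new provider is registered with struct-update syntax (`is_foreign_item, ..*providers`), so only the entries this crate implements are overwritten and the rest are kept. A hedged, self-contained sketch of that registration pattern, using a hypothetical `Providers` table rather than rustc's actual one:

    // Hypothetical provider table: each field is a function pointer that
    // answers one kind of query for a given item id.
    #[derive(Clone, Copy)]
    struct Providers {
        is_foreign_item: fn(u32) -> bool,
        item_path: fn(u32) -> &'static str,
    }

    // Stand-in for the real check against the HIR map in the hunk above.
    fn is_foreign_item(def_id: u32) -> bool {
        def_id % 2 == 0
    }

    // Mirrors the `provide` functions in this diff: overwrite only the
    // entries this crate implements and keep the rest via `..*providers`.
    fn provide(providers: &mut Providers) {
        *providers = Providers {
            is_foreign_item,
            ..*providers
        };
    }

    fn main() {
        let mut providers = Providers {
            is_foreign_item: |_| false,
            item_path: |_| "<unknown>",
        };
        provide(&mut providers);
        assert!((providers.is_foreign_item)(4));
        // The untouched entry is still the original one.
        assert_eq!((providers.item_path)(4), "<unknown>");
    }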