run rustfmt on librustc_metadata folder

Author: Srinivas Reddy Thatiparthy, 2016-10-22 18:44:32 +05:30
parent affc3b7552
commit 892a05d694
6 changed files with 363 additions and 342 deletions
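
The change itself is purely mechanical: every `.rs` file under the `librustc_metadata` folder was rewritten by rustfmt, so the hunks below only reflow code without altering behavior. As a rough, hypothetical illustration (not part of this commit), a small Rust driver that walks a directory and runs the external `rustfmt` binary on each `.rs` file could look like the sketch below; it assumes `rustfmt` is installed and on `PATH` and that its default mode rewrites files in place.

```rust
// Hypothetical sketch: recursively apply rustfmt to every .rs file in a folder.
use std::fs;
use std::io;
use std::path::Path;
use std::process::Command;

fn format_dir(dir: &Path) -> io::Result<()> {
    for entry in fs::read_dir(dir)? {
        let path = entry?.path();
        if path.is_dir() {
            // Recurse into subdirectories.
            format_dir(&path)?;
        } else if path.extension().map_or(false, |ext| ext == "rs") {
            // Invoke the external `rustfmt` binary on this file.
            let status = Command::new("rustfmt").arg(&path).status()?;
            if !status.success() {
                eprintln!("rustfmt reported an error for {}", path.display());
            }
        }
    }
    Ok(())
}

fn main() {
    format_dir(Path::new("src/librustc_metadata")).expect("failed to format directory");
}
```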


@@ -117,7 +117,8 @@ impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
 }

 impl<'a, 'tcx> SpecializedEncoder<ty::GenericPredicates<'tcx>> for EncodeContext<'a, 'tcx> {
-    fn specialized_encode(&mut self, predicates: &ty::GenericPredicates<'tcx>)
+    fn specialized_encode(&mut self,
+                          predicates: &ty::GenericPredicates<'tcx>)
                           -> Result<(), Self::Error> {
         predicates.parent.encode(self)?;
         predicates.predicates.len().encode(self)?;
@@ -142,13 +143,13 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         r
     }

-    fn emit_lazy_distance(&mut self, position: usize, min_size: usize)
+    fn emit_lazy_distance(&mut self,
+                          position: usize,
+                          min_size: usize)
                           -> Result<(), <Self as Encoder>::Error> {
         let min_end = position + min_size;
         let distance = match self.lazy_state {
-            LazyState::NoNode => {
-                bug!("emit_lazy_distance: outside of a metadata node")
-            }
+            LazyState::NoNode => bug!("emit_lazy_distance: outside of a metadata node"),
             LazyState::NodeStart(start) => {
                 assert!(min_end <= start);
                 start - min_end
@@ -172,7 +173,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     }

     fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
-        where I: IntoIterator<Item=T>, T: Encodable {
+        where I: IntoIterator<Item = T>,
+              T: Encodable
+    {
         self.emit_node(|ecx, pos| {
             let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count();
@@ -182,7 +185,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     }

     fn lazy_seq_ref<'b, I, T>(&mut self, iter: I) -> LazySeq<T>
-        where I: IntoIterator<Item=&'b T>, T: 'b + Encodable {
+        where I: IntoIterator<Item = &'b T>,
+              T: 'b + Encodable
+    {
         self.emit_node(|ecx, pos| {
             let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count();
@@ -192,11 +197,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     }

     /// Encode the given value or a previously cached shorthand.
-    fn encode_with_shorthand<T, U, M>(&mut self, value: &T, variant: &U, map: M)
+    fn encode_with_shorthand<T, U, M>(&mut self,
+                                      value: &T,
+                                      variant: &U,
+                                      map: M)
                                       -> Result<(), <Self as Encoder>::Error>
         where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap<T, usize>,
               T: Clone + Eq + Hash,
-              U: Encodable {
+              U: Encodable
+    {
         let existing_shorthand = map(self).get(value).cloned();
         if let Some(shorthand) = existing_shorthand {
             return self.emit_usize(shorthand);
@@ -208,9 +217,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         // The shorthand encoding uses the same usize as the
         // discriminant, with an offset so they can't conflict.
-        let discriminant = unsafe {
-            intrinsics::discriminant_value(variant)
-        };
+        let discriminant = unsafe { intrinsics::discriminant_value(variant) };
         assert!(discriminant < SHORTHAND_OFFSET as u64);
         let shorthand = start + SHORTHAND_OFFSET;
@@ -250,8 +257,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     /// the right to access any information in the adt-def (including,
     /// e.g., the length of the various vectors).
     fn encode_enum_variant_info(&mut self,
-                                (enum_did, Untracked(index)):
-                                (DefId, Untracked<usize>)) -> Entry<'tcx> {
+                                (enum_did, Untracked(index)): (DefId, Untracked<usize>))
+                                -> Entry<'tcx> {
         let tcx = self.tcx;
         let def = tcx.lookup_adt_def(enum_did);
         let variant = &def.variants[index];
@@ -260,7 +267,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
         let data = VariantData {
             ctor_kind: variant.ctor_kind,
             disr: variant.disr_val.to_u64_unchecked(),
-            struct_ctor: None
+            struct_ctor: None,
         };

         let enum_id = tcx.map.as_local_node_id(enum_did).unwrap();
@@ -285,24 +292,23 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
             predicates: Some(self.encode_predicates(def_id)),
             ast: None,
-            mir: None
+            mir: None,
         }
     }

     fn encode_info_for_mod(&mut self,
-                           FromId(id, (md, attrs, vis)):
-                           FromId<(&hir::Mod, &[ast::Attribute], &hir::Visibility)>)
+                           FromId(id, (md, attrs, vis)): FromId<(&hir::Mod,
+                                                                 &[ast::Attribute],
+                                                                 &hir::Visibility)>)
                            -> Entry<'tcx> {
         let tcx = self.tcx;
         let def_id = tcx.map.local_def_id(id);
         let data = ModData {
             reexports: match self.reexports.get(&id) {
-                Some(exports) if *vis == hir::Public => {
-                    self.lazy_seq_ref(exports)
-                }
-                _ => LazySeq::empty()
-            }
+                Some(exports) if *vis == hir::Public => self.lazy_seq_ref(exports),
+                _ => LazySeq::empty(),
+            },
         };

         Entry {
@@ -353,8 +359,7 @@ impl Visibility for ty::Visibility {
 }

 impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
-    fn encode_fields(&mut self,
-                     adt_def_id: DefId) {
+    fn encode_fields(&mut self, adt_def_id: DefId) {
         let def = self.tcx.lookup_adt_def(adt_def_id);
         for (variant_index, variant) in def.variants.iter().enumerate() {
             for (field_index, field) in variant.fields.iter().enumerate() {
@@ -374,8 +379,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     /// the adt-def (including, e.g., the length of the various
     /// vectors).
     fn encode_field(&mut self,
-                    (adt_def_id, Untracked((variant_index, field_index))):
-                    (DefId, Untracked<(usize, usize)>)) -> Entry<'tcx> {
+                    (adt_def_id, Untracked((variant_index, field_index))): (DefId,
+                                                                            Untracked<(usize,
+                                                                                       usize)>))
+                    -> Entry<'tcx> {
         let tcx = self.tcx;
         let variant = &tcx.lookup_adt_def(adt_def_id).variants[variant_index];
         let field = &variant.fields[field_index];
@ -400,19 +407,18 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
predicates: Some(self.encode_predicates(def_id)), predicates: Some(self.encode_predicates(def_id)),
ast: None, ast: None,
mir: None mir: None,
} }
} }
fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> {
-> Entry<'tcx> {
let tcx = self.tcx; let tcx = self.tcx;
let variant = tcx.lookup_adt_def(adt_def_id).struct_variant(); let variant = tcx.lookup_adt_def(adt_def_id).struct_variant();
let data = VariantData { let data = VariantData {
ctor_kind: variant.ctor_kind, ctor_kind: variant.ctor_kind,
disr: variant.disr_val.to_u64_unchecked(), disr: variant.disr_val.to_u64_unchecked(),
struct_ctor: Some(def_id.index) struct_ctor: Some(def_id.index),
}; };
let struct_id = tcx.map.as_local_node_id(adt_def_id).unwrap(); let struct_id = tcx.map.as_local_node_id(adt_def_id).unwrap();
@ -434,7 +440,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
predicates: Some(self.encode_predicates(def_id)), predicates: Some(self.encode_predicates(def_id)),
ast: None, ast: None,
mir: None mir: None,
} }
} }
@ -469,7 +475,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let fn_data = if let hir::MethodTraitItem(ref sig, _) = ast_item.node { let fn_data = if let hir::MethodTraitItem(ref sig, _) = ast_item.node {
FnData { FnData {
constness: hir::Constness::NotConst, constness: hir::Constness::NotConst,
arg_names: self.encode_fn_arg_names(&sig.decl) arg_names: self.encode_fn_arg_names(&sig.decl),
} }
} else { } else {
bug!() bug!()
@ -477,13 +483,11 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let data = MethodData { let data = MethodData {
fn_data: fn_data, fn_data: fn_data,
container: container(method_ty.has_body), container: container(method_ty.has_body),
explicit_self: self.lazy(&method_ty.explicit_self) explicit_self: self.lazy(&method_ty.explicit_self),
}; };
EntryKind::Method(self.lazy(&data)) EntryKind::Method(self.lazy(&data))
} }
ty::TypeTraitItem(_) => { ty::TypeTraitItem(_) => EntryKind::AssociatedType(container(false)),
EntryKind::AssociatedType(container(false))
}
}; };
Entry { Entry {
@ -497,9 +501,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
ty: match trait_item { ty: match trait_item {
ty::ConstTraitItem(_) | ty::ConstTraitItem(_) |
ty::MethodTraitItem(_) => { ty::MethodTraitItem(_) => Some(self.encode_item_type(def_id)),
Some(self.encode_item_type(def_id))
}
ty::TypeTraitItem(ref associated_type) => { ty::TypeTraitItem(ref associated_type) => {
associated_type.ty.map(|ty| self.lazy(&ty)) associated_type.ty.map(|ty| self.lazy(&ty))
} }
@ -515,7 +517,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
} else { } else {
None None
}, },
mir: self.encode_mir(def_id) mir: self.encode_mir(def_id),
} }
} }
@ -527,18 +529,16 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let container = match ast_item.defaultness { let container = match ast_item.defaultness {
hir::Defaultness::Default => AssociatedContainer::ImplDefault, hir::Defaultness::Default => AssociatedContainer::ImplDefault,
hir::Defaultness::Final => AssociatedContainer::ImplFinal hir::Defaultness::Final => AssociatedContainer::ImplFinal,
}; };
let kind = match impl_item { let kind = match impl_item {
ty::ConstTraitItem(_) => { ty::ConstTraitItem(_) => EntryKind::AssociatedConst(container),
EntryKind::AssociatedConst(container)
}
ty::MethodTraitItem(ref method_ty) => { ty::MethodTraitItem(ref method_ty) => {
let fn_data = if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node { let fn_data = if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node {
FnData { FnData {
constness: sig.constness, constness: sig.constness,
arg_names: self.encode_fn_arg_names(&sig.decl) arg_names: self.encode_fn_arg_names(&sig.decl),
} }
} else { } else {
bug!() bug!()
@ -546,13 +546,11 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let data = MethodData { let data = MethodData {
fn_data: fn_data, fn_data: fn_data,
container: container, container: container,
explicit_self: self.lazy(&method_ty.explicit_self) explicit_self: self.lazy(&method_ty.explicit_self),
}; };
EntryKind::Method(self.lazy(&data)) EntryKind::Method(self.lazy(&data))
} }
ty::TypeTraitItem(_) => { ty::TypeTraitItem(_) => EntryKind::AssociatedType(container),
EntryKind::AssociatedType(container)
}
}; };
let (ast, mir) = if let ty::ConstTraitItem(_) = impl_item { let (ast, mir) = if let ty::ConstTraitItem(_) = impl_item {
@ -578,9 +576,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
ty: match impl_item { ty: match impl_item {
ty::ConstTraitItem(_) | ty::ConstTraitItem(_) |
ty::MethodTraitItem(_) => { ty::MethodTraitItem(_) => Some(self.encode_item_type(def_id)),
Some(self.encode_item_type(def_id))
}
ty::TypeTraitItem(ref associated_type) => { ty::TypeTraitItem(ref associated_type) => {
associated_type.ty.map(|ty| self.lazy(&ty)) associated_type.ty.map(|ty| self.lazy(&ty))
} }
@ -595,11 +591,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
} else { } else {
None None
}, },
mir: if mir { mir: if mir { self.encode_mir(def_id) } else { None },
self.encode_mir(def_id)
} else {
None
}
} }
} }
@ -638,8 +630,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr)) self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr))
} }
fn encode_info_for_item(&mut self, fn encode_info_for_item(&mut self, (def_id, item): (DefId, &hir::Item)) -> Entry<'tcx> {
(def_id, item): (DefId, &hir::Item)) -> Entry<'tcx> {
let tcx = self.tcx; let tcx = self.tcx;
debug!("encoding info for item at {}", debug!("encoding info for item at {}",
@ -652,7 +643,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ItemFn(ref decl, _, constness, ..) => { hir::ItemFn(ref decl, _, constness, ..) => {
let data = FnData { let data = FnData {
constness: constness, constness: constness,
arg_names: self.encode_fn_arg_names(&decl) arg_names: self.encode_fn_arg_names(&decl),
}; };
EntryKind::Fn(self.lazy(&data)) EntryKind::Fn(self.lazy(&data))
@ -666,9 +657,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ItemStruct(ref struct_def, _) => { hir::ItemStruct(ref struct_def, _) => {
let variant = tcx.lookup_adt_def(def_id).struct_variant(); let variant = tcx.lookup_adt_def(def_id).struct_variant();
/* Encode def_ids for each field and method // Encode def_ids for each field and method
for methods, write all the stuff get_trait_method // for methods, write all the stuff get_trait_method
needs to know*/ // needs to know
let struct_ctor = if !struct_def.is_struct() { let struct_ctor = if !struct_def.is_struct() {
Some(tcx.map.local_def_id(struct_def.id()).index) Some(tcx.map.local_def_id(struct_def.id()).index)
} else { } else {
@ -677,7 +668,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
EntryKind::Struct(self.lazy(&VariantData { EntryKind::Struct(self.lazy(&VariantData {
ctor_kind: variant.ctor_kind, ctor_kind: variant.ctor_kind,
disr: variant.disr_val.to_u64_unchecked(), disr: variant.disr_val.to_u64_unchecked(),
struct_ctor: struct_ctor struct_ctor: struct_ctor,
})) }))
} }
hir::ItemUnion(..) => { hir::ItemUnion(..) => {
@ -686,7 +677,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
EntryKind::Union(self.lazy(&VariantData { EntryKind::Union(self.lazy(&VariantData {
ctor_kind: variant.ctor_kind, ctor_kind: variant.ctor_kind,
disr: variant.disr_val.to_u64_unchecked(), disr: variant.disr_val.to_u64_unchecked(),
struct_ctor: None struct_ctor: None,
})) }))
} }
hir::ItemDefaultImpl(..) => { hir::ItemDefaultImpl(..) => {
@ -694,7 +685,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
polarity: hir::ImplPolarity::Positive, polarity: hir::ImplPolarity::Positive,
parent_impl: None, parent_impl: None,
coerce_unsized_kind: None, coerce_unsized_kind: None,
trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)) trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)),
}; };
EntryKind::DefaultImpl(self.lazy(&data)) EntryKind::DefaultImpl(self.lazy(&data))
@ -716,9 +707,11 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let data = ImplData { let data = ImplData {
polarity: polarity, polarity: polarity,
parent_impl: parent, parent_impl: parent,
coerce_unsized_kind: tcx.custom_coerce_unsized_kinds.borrow() coerce_unsized_kind: tcx.custom_coerce_unsized_kinds
.get(&def_id).cloned(), .borrow()
trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)) .get(&def_id)
.cloned(),
trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)),
}; };
EntryKind::Impl(self.lazy(&data)) EntryKind::Impl(self.lazy(&data))
@ -730,14 +723,13 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
paren_sugar: trait_def.paren_sugar, paren_sugar: trait_def.paren_sugar,
has_default_impl: tcx.trait_has_default_impl(def_id), has_default_impl: tcx.trait_has_default_impl(def_id),
trait_ref: self.lazy(&trait_def.trait_ref), trait_ref: self.lazy(&trait_def.trait_ref),
super_predicates: self.lazy(&tcx.lookup_super_predicates(def_id)) super_predicates: self.lazy(&tcx.lookup_super_predicates(def_id)),
}; };
EntryKind::Trait(self.lazy(&data)) EntryKind::Trait(self.lazy(&data))
} }
hir::ItemExternCrate(_) | hir::ItemUse(_) => { hir::ItemExternCrate(_) |
bug!("cannot encode info for item {:?}", item) hir::ItemUse(_) => bug!("cannot encode info for item {:?}", item),
}
}; };
Entry { Entry {
@ -747,9 +739,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
attributes: self.encode_attributes(&item.attrs), attributes: self.encode_attributes(&item.attrs),
children: match item.node { children: match item.node {
hir::ItemForeignMod(ref fm) => { hir::ItemForeignMod(ref fm) => {
self.lazy_seq(fm.items.iter().map(|foreign_item| { self.lazy_seq(fm.items
tcx.map.local_def_id(foreign_item.id).index .iter()
})) .map(|foreign_item| tcx.map.local_def_id(foreign_item.id).index))
} }
hir::ItemEnum(..) => { hir::ItemEnum(..) => {
let def = self.tcx.lookup_adt_def(def_id); let def = self.tcx.lookup_adt_def(def_id);
@ -773,7 +765,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
def_id.index def_id.index
})) }))
} }
_ => LazySeq::empty() _ => LazySeq::empty(),
}, },
stability: self.encode_stability(def_id), stability: self.encode_stability(def_id),
deprecation: self.encode_deprecation(def_id), deprecation: self.encode_deprecation(def_id),
@ -786,20 +778,16 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ItemEnum(..) | hir::ItemEnum(..) |
hir::ItemStruct(..) | hir::ItemStruct(..) |
hir::ItemUnion(..) | hir::ItemUnion(..) |
hir::ItemImpl(..) => { hir::ItemImpl(..) => Some(self.encode_item_type(def_id)),
Some(self.encode_item_type(def_id)) _ => None,
}
_ => None
}, },
inherent_impls: self.encode_inherent_implementations(def_id), inherent_impls: self.encode_inherent_implementations(def_id),
variances: match item.node { variances: match item.node {
hir::ItemEnum(..) | hir::ItemEnum(..) |
hir::ItemStruct(..) | hir::ItemStruct(..) |
hir::ItemUnion(..) | hir::ItemUnion(..) |
hir::ItemTrait(..) => { hir::ItemTrait(..) => self.encode_item_variances(def_id),
self.encode_item_variances(def_id) _ => LazySeq::empty(),
}
_ => LazySeq::empty()
}, },
generics: match item.node { generics: match item.node {
hir::ItemStatic(..) | hir::ItemStatic(..) |
@ -810,10 +798,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ItemStruct(..) | hir::ItemStruct(..) |
hir::ItemUnion(..) | hir::ItemUnion(..) |
hir::ItemImpl(..) | hir::ItemImpl(..) |
hir::ItemTrait(..) => { hir::ItemTrait(..) => Some(self.encode_generics(def_id)),
Some(self.encode_generics(def_id)) _ => None,
}
_ => None
}, },
predicates: match item.node { predicates: match item.node {
hir::ItemStatic(..) | hir::ItemStatic(..) |
@ -824,10 +810,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ItemStruct(..) | hir::ItemStruct(..) |
hir::ItemUnion(..) | hir::ItemUnion(..) |
hir::ItemImpl(..) | hir::ItemImpl(..) |
hir::ItemTrait(..) => { hir::ItemTrait(..) => Some(self.encode_predicates(def_id)),
Some(self.encode_predicates(def_id)) _ => None,
}
_ => None
}, },
ast: match item.node { ast: match item.node {
@ -835,12 +819,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ItemFn(_, _, hir::Constness::Const, ..) => { hir::ItemFn(_, _, hir::Constness::Const, ..) => {
Some(self.encode_inlined_item(InlinedItemRef::Item(def_id, item))) Some(self.encode_inlined_item(InlinedItemRef::Item(def_id, item)))
} }
_ => None _ => None,
}, },
mir: match item.node { mir: match item.node {
hir::ItemConst(..) => { hir::ItemConst(..) => self.encode_mir(def_id),
self.encode_mir(def_id)
}
hir::ItemFn(_, _, constness, _, ref generics, _) => { hir::ItemFn(_, _, constness, _, ref generics, _) => {
let tps_len = generics.ty_params.len(); let tps_len = generics.ty_params.len();
let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs); let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs);
@ -850,8 +832,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
None None
} }
} }
_ => None _ => None,
} },
} }
} }
} }
@ -861,8 +843,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
/// encode some sub-items. Usually we want some info from the item /// encode some sub-items. Usually we want some info from the item
/// so it's easier to do that here then to wait until we would encounter /// so it's easier to do that here then to wait until we would encounter
/// normally in the visitor walk. /// normally in the visitor walk.
fn encode_addl_info_for_item(&mut self, fn encode_addl_info_for_item(&mut self, item: &hir::Item) {
item: &hir::Item) {
let def_id = self.tcx.map.local_def_id(item.id); let def_id = self.tcx.map.local_def_id(item.id);
match item.node { match item.node {
hir::ItemStatic(..) | hir::ItemStatic(..) |
@ -930,12 +911,12 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hir::ForeignItemFn(ref fndecl, _) => { hir::ForeignItemFn(ref fndecl, _) => {
let data = FnData { let data = FnData {
constness: hir::Constness::NotConst, constness: hir::Constness::NotConst,
arg_names: self.encode_fn_arg_names(&fndecl) arg_names: self.encode_fn_arg_names(&fndecl),
}; };
EntryKind::ForeignFn(self.lazy(&data)) EntryKind::ForeignFn(self.lazy(&data))
} }
hir::ForeignItemStatic(_, true) => EntryKind::ForeignMutStatic, hir::ForeignItemStatic(_, true) => EntryKind::ForeignMutStatic,
hir::ForeignItemStatic(_, false) => EntryKind::ForeignImmStatic hir::ForeignItemStatic(_, false) => EntryKind::ForeignImmStatic,
}; };
Entry { Entry {
@ -954,7 +935,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
predicates: Some(self.encode_predicates(def_id)), predicates: Some(self.encode_predicates(def_id)),
ast: None, ast: None,
mir: None mir: None,
} }
} }
} }
@ -972,10 +953,9 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'tcx> {
intravisit::walk_item(self, item); intravisit::walk_item(self, item);
let def_id = self.index.tcx.map.local_def_id(item.id); let def_id = self.index.tcx.map.local_def_id(item.id);
match item.node { match item.node {
hir::ItemExternCrate(_) | hir::ItemUse(_) => (), // ignore these hir::ItemExternCrate(_) |
_ => self.index.record(def_id, hir::ItemUse(_) => (), // ignore these
EncodeContext::encode_info_for_item, _ => self.index.record(def_id, EncodeContext::encode_info_for_item, (def_id, item)),
(def_id, item)),
} }
self.index.encode_addl_info_for_item(item); self.index.encode_addl_info_for_item(item);
} }
@ -996,9 +976,7 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
fn encode_info_for_ty(&mut self, ty: &hir::Ty) { fn encode_info_for_ty(&mut self, ty: &hir::Ty) {
if let hir::TyImplTrait(_) = ty.node { if let hir::TyImplTrait(_) = ty.node {
let def_id = self.tcx.map.local_def_id(ty.id); let def_id = self.tcx.map.local_def_id(ty.id);
self.record(def_id, self.record(def_id, EncodeContext::encode_info_for_anon_ty, def_id);
EncodeContext::encode_info_for_anon_ty,
def_id);
} }
} }
@ -1006,11 +984,9 @@ impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
match expr.node { match expr.node {
hir::ExprClosure(..) => { hir::ExprClosure(..) => {
let def_id = self.tcx.map.local_def_id(expr.id); let def_id = self.tcx.map.local_def_id(expr.id);
self.record(def_id, self.record(def_id, EncodeContext::encode_info_for_closure, def_id);
EncodeContext::encode_info_for_closure,
def_id);
} }
_ => { } _ => {}
} }
} }
} }
@ -1033,7 +1009,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
predicates: Some(self.encode_predicates(def_id)), predicates: Some(self.encode_predicates(def_id)),
ast: None, ast: None,
mir: None mir: None,
} }
} }
@ -1042,7 +1018,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
let data = ClosureData { let data = ClosureData {
kind: tcx.closure_kind(def_id), kind: tcx.closure_kind(def_id),
ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id]) ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id]),
}; };
Entry { Entry {
@ -1061,7 +1037,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
predicates: None, predicates: None,
ast: None, ast: None,
mir: self.encode_mir(def_id) mir: self.encode_mir(def_id),
} }
} }
@ -1071,9 +1047,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
index.record(DefId::local(CRATE_DEF_INDEX), index.record(DefId::local(CRATE_DEF_INDEX),
EncodeContext::encode_info_for_mod, EncodeContext::encode_info_for_mod,
FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &hir::Public))); FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &hir::Public)));
let mut visitor = EncodeVisitor { let mut visitor = EncodeVisitor { index: index };
index: index,
};
krate.visit_all_items(&mut visitor); krate.visit_all_items(&mut visitor);
visitor.index.into_items() visitor.index.into_items()
} }
@ -1083,8 +1057,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
} }
fn encode_crate_deps(&mut self) -> LazySeq<CrateDep> { fn encode_crate_deps(&mut self) -> LazySeq<CrateDep> {
fn get_ordered_deps(cstore: &cstore::CStore) fn get_ordered_deps(cstore: &cstore::CStore) -> Vec<(CrateNum, Rc<cstore::CrateMetadata>)> {
-> Vec<(CrateNum, Rc<cstore::CrateMetadata>)> {
// Pull the cnums and name,vers,hash out of cstore // Pull the cnums and name,vers,hash out of cstore
let mut deps = Vec::new(); let mut deps = Vec::new();
cstore.iter_crate_data(|cnum, val| { cstore.iter_crate_data(|cnum, val| {
@ -1113,13 +1086,12 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
CrateDep { CrateDep {
name: syntax::parse::token::intern(dep.name()), name: syntax::parse::token::intern(dep.name()),
hash: dep.hash(), hash: dep.hash(),
explicitly_linked: dep.explicitly_linked.get() explicitly_linked: dep.explicitly_linked.get(),
} }
})) }))
} }
fn encode_lang_items(&mut self) fn encode_lang_items(&mut self) -> (LazySeq<(DefIndex, usize)>, LazySeq<lang_items::LangItem>) {
-> (LazySeq<(DefIndex, usize)>, LazySeq<lang_items::LangItem>) {
let tcx = self.tcx; let tcx = self.tcx;
let lang_items = tcx.lang_items.items().iter(); let lang_items = tcx.lang_items.items().iter();
(self.lazy_seq(lang_items.enumerate().filter_map(|(i, &opt_def_id)| { (self.lazy_seq(lang_items.enumerate().filter_map(|(i, &opt_def_id)| {
@ -1129,7 +1101,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
} }
} }
None None
})), self.lazy_seq_ref(&tcx.lang_items.missing)) })),
self.lazy_seq_ref(&tcx.lang_items.missing))
} }
fn encode_native_libraries(&mut self) -> LazySeq<(NativeLibraryKind, String)> { fn encode_native_libraries(&mut self) -> LazySeq<(NativeLibraryKind, String)> {
@ -1137,9 +1110,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
self.lazy_seq(used_libraries.into_iter().filter_map(|(lib, kind)| { self.lazy_seq(used_libraries.into_iter().filter_map(|(lib, kind)| {
match kind { match kind {
cstore::NativeStatic => None, // these libraries are not propagated cstore::NativeStatic => None, // these libraries are not propagated
cstore::NativeFramework | cstore::NativeUnknown => { cstore::NativeFramework | cstore::NativeUnknown => Some((kind, lib)),
Some((kind, lib))
}
} }
})) }))
} }
@@ -1147,13 +1118,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
         let codemap = self.tcx.sess.codemap();
         let all_filemaps = codemap.files.borrow();
-        self.lazy_seq_ref(all_filemaps.iter().filter(|filemap| {
-            // No need to export empty filemaps, as they can't contain spans
-            // that need translation.
-            // Also no need to re-export imported filemaps, as any downstream
-            // crate will import them from their original source.
-            !filemap.lines.borrow().is_empty() && !filemap.is_imported()
-        }).map(|filemap| &**filemap))
+        self.lazy_seq_ref(all_filemaps.iter()
+            .filter(|filemap| {
+                // No need to export empty filemaps, as they can't contain spans
+                // that need translation.
+                // Also no need to re-export imported filemaps, as any downstream
+                // crate will import them from their original source.
+                !filemap.lines.borrow().is_empty() && !filemap.is_imported()
+            })
+            .map(|filemap| &**filemap))
     }

     /// Serialize the text of the exported macros
@ -1164,15 +1137,15 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
name: def.name, name: def.name,
attrs: def.attrs.to_vec(), attrs: def.attrs.to_vec(),
span: def.span, span: def.span,
body: ::syntax::print::pprust::tts_to_string(&def.body) body: ::syntax::print::pprust::tts_to_string(&def.body),
} }
})) }))
} }
} }
struct ImplVisitor<'a, 'tcx:'a> { struct ImplVisitor<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>,
impls: FnvHashMap<DefId, Vec<DefIndex>> impls: FnvHashMap<DefId, Vec<DefIndex>>,
} }
impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> {
@ -1180,7 +1153,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> {
if let hir::ItemImpl(..) = item.node { if let hir::ItemImpl(..) = item.node {
let impl_id = self.tcx.map.local_def_id(item.id); let impl_id = self.tcx.map.local_def_id(item.id);
if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) { if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) {
self.impls.entry(trait_ref.def_id) self.impls
.entry(trait_ref.def_id)
.or_insert(vec![]) .or_insert(vec![])
.push(impl_id.index); .push(impl_id.index);
} }
@@ -1193,16 +1167,19 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
     fn encode_impls(&mut self) -> LazySeq<TraitImpls> {
         let mut visitor = ImplVisitor {
             tcx: self.tcx,
-            impls: FnvHashMap()
+            impls: FnvHashMap(),
         };
         self.tcx.map.krate().visit_all_items(&mut visitor);

-        let all_impls: Vec<_> = visitor.impls.into_iter().map(|(trait_def_id, impls)| {
-            TraitImpls {
-                trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
-                impls: self.lazy_seq(impls)
-            }
-        }).collect();
+        let all_impls: Vec<_> = visitor.impls
+            .into_iter()
+            .map(|(trait_def_id, impls)| {
+                TraitImpls {
+                    trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
+                    impls: self.lazy_seq(impls),
+                }
+            })
+            .collect();

         self.lazy_seq(all_impls)
     }
@ -1232,7 +1209,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
} }
})) }))
} }
None => LazySeq::empty() None => LazySeq::empty(),
} }
} }
@ -1291,9 +1268,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
hash: link_meta.crate_hash, hash: link_meta.crate_hash,
disambiguator: tcx.sess.local_crate_disambiguator().to_string(), disambiguator: tcx.sess.local_crate_disambiguator().to_string(),
panic_strategy: tcx.sess.panic_strategy(), panic_strategy: tcx.sess.panic_strategy(),
plugin_registrar_fn: tcx.sess.plugin_registrar_fn.get().map(|id| { plugin_registrar_fn: tcx.sess
tcx.map.local_def_id(id).index .plugin_registrar_fn
}), .get()
.map(|id| tcx.map.local_def_id(id).index),
macro_derive_registrar: if is_proc_macro { macro_derive_registrar: if is_proc_macro {
let id = tcx.sess.derive_registrar_fn.get().unwrap(); let id = tcx.sess.derive_registrar_fn.get().unwrap();
Some(tcx.map.local_def_id(id).index) Some(tcx.map.local_def_id(id).index)
@ -1369,7 +1347,8 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
reexports: &def::ExportMap, reexports: &def::ExportMap,
link_meta: &LinkMeta, link_meta: &LinkMeta,
reachable: &NodeSet, reachable: &NodeSet,
mir_map: &MirMap<'tcx>) -> Vec<u8> { mir_map: &MirMap<'tcx>)
-> Vec<u8> {
let mut cursor = Cursor::new(vec![]); let mut cursor = Cursor::new(vec![]);
cursor.write_all(METADATA_HEADER).unwrap(); cursor.write_all(METADATA_HEADER).unwrap();
@ -1377,17 +1356,18 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
cursor.write_all(&[0, 0, 0, 0]).unwrap(); cursor.write_all(&[0, 0, 0, 0]).unwrap();
let root = EncodeContext { let root = EncodeContext {
opaque: opaque::Encoder::new(&mut cursor), opaque: opaque::Encoder::new(&mut cursor),
tcx: tcx, tcx: tcx,
reexports: reexports, reexports: reexports,
link_meta: link_meta, link_meta: link_meta,
cstore: cstore, cstore: cstore,
reachable: reachable, reachable: reachable,
mir_map: mir_map, mir_map: mir_map,
lazy_state: LazyState::NoNode, lazy_state: LazyState::NoNode,
type_shorthands: Default::default(), type_shorthands: Default::default(),
predicate_shorthands: Default::default() predicate_shorthands: Default::default(),
}.encode_crate_root(); }
.encode_crate_root();
let mut result = cursor.into_inner(); let mut result = cursor.into_inner();
// Encode the root position. // Encode the root position.
@ -1395,8 +1375,8 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let pos = root.position; let pos = root.position;
result[header + 0] = (pos >> 24) as u8; result[header + 0] = (pos >> 24) as u8;
result[header + 1] = (pos >> 16) as u8; result[header + 1] = (pos >> 16) as u8;
result[header + 2] = (pos >> 8) as u8; result[header + 2] = (pos >> 8) as u8;
result[header + 3] = (pos >> 0) as u8; result[header + 3] = (pos >> 0) as u8;
result result
} }


@ -28,9 +28,7 @@ pub struct Index {
impl Index { impl Index {
pub fn new(max_index: usize) -> Index { pub fn new(max_index: usize) -> Index {
Index { Index { positions: vec![u32::MAX; max_index] }
positions: vec![u32::MAX; max_index]
}
} }
pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry>) { pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry>) {
@ -46,7 +44,9 @@ impl Index {
assert!(self.positions[item] == u32::MAX, assert!(self.positions[item] == u32::MAX,
"recorded position for item {:?} twice, first at {:?} and now at {:?}", "recorded position for item {:?} twice, first at {:?} and now at {:?}",
item, self.positions[item], position); item,
self.positions[item],
position);
self.positions[item] = position.to_le(); self.positions[item] = position.to_le();
} }
@ -67,7 +67,8 @@ impl<'tcx> LazySeq<Index> {
let index = def_index.as_usize(); let index = def_index.as_usize();
debug!("Index::lookup: index={:?} words.len={:?}", debug!("Index::lookup: index={:?} words.len={:?}",
index, words.len()); index,
words.len());
let position = u32::from_le(words[index]); let position = u32::from_le(words[index]);
if position == u32::MAX { if position == u32::MAX {
@ -79,8 +80,9 @@ impl<'tcx> LazySeq<Index> {
} }
} }
pub fn iter_enumerated<'a>(&self, bytes: &'a [u8]) pub fn iter_enumerated<'a>(&self,
-> impl Iterator<Item=(DefIndex, Lazy<Entry<'tcx>>)> + 'a { bytes: &'a [u8])
-> impl Iterator<Item = (DefIndex, Lazy<Entry<'tcx>>)> + 'a {
let words = &bytes_to_words(&bytes[self.position..])[..self.len]; let words = &bytes_to_words(&bytes[self.position..])[..self.len];
words.iter().enumerate().filter_map(|(index, &position)| { words.iter().enumerate().filter_map(|(index, &position)| {
if position == u32::MAX { if position == u32::MAX {


@ -138,11 +138,11 @@ pub trait DepGraphRead {
} }
impl DepGraphRead for DefId { impl DepGraphRead for DefId {
fn read(&self, _tcx: TyCtxt) { } fn read(&self, _tcx: TyCtxt) {}
} }
impl DepGraphRead for ast::NodeId { impl DepGraphRead for ast::NodeId {
fn read(&self, _tcx: TyCtxt) { } fn read(&self, _tcx: TyCtxt) {}
} }
impl<T> DepGraphRead for Option<T> impl<T> DepGraphRead for Option<T>
@ -179,8 +179,8 @@ macro_rules! read_tuple {
} }
} }
} }
read_tuple!(A,B); read_tuple!(A, B);
read_tuple!(A,B,C); read_tuple!(A, B, C);
macro_rules! read_hir { macro_rules! read_hir {
($t:ty) => { ($t:ty) => {
@ -208,7 +208,7 @@ read_hir!(hir::ForeignItem);
pub struct Untracked<T>(pub T); pub struct Untracked<T>(pub T);
impl<T> DepGraphRead for Untracked<T> { impl<T> DepGraphRead for Untracked<T> {
fn read(&self, _tcx: TyCtxt) { } fn read(&self, _tcx: TyCtxt) {}
} }
/// Newtype that can be used to package up misc data extracted from a /// Newtype that can be used to package up misc data extracted from a


@ -30,8 +30,10 @@
#![feature(specialization)] #![feature(specialization)]
#![feature(staged_api)] #![feature(staged_api)]
#[macro_use] extern crate log; #[macro_use]
#[macro_use] extern crate syntax; extern crate log;
#[macro_use]
extern crate syntax;
extern crate syntax_pos; extern crate syntax_pos;
extern crate flate; extern crate flate;
extern crate serialize as rustc_serialize; // used by deriving extern crate serialize as rustc_serialize; // used by deriving


@ -273,7 +273,7 @@ pub struct ArchiveMetadata {
pub struct CratePaths { pub struct CratePaths {
pub ident: String, pub ident: String,
pub dylib: Option<PathBuf>, pub dylib: Option<PathBuf>,
pub rlib: Option<PathBuf> pub rlib: Option<PathBuf>,
} }
pub const METADATA_FILENAME: &'static str = "rust.metadata.bin"; pub const METADATA_FILENAME: &'static str = "rust.metadata.bin";
@ -281,14 +281,14 @@ pub const METADATA_FILENAME: &'static str = "rust.metadata.bin";
#[derive(Copy, Clone, PartialEq)] #[derive(Copy, Clone, PartialEq)]
enum CrateFlavor { enum CrateFlavor {
Rlib, Rlib,
Dylib Dylib,
} }
impl fmt::Display for CrateFlavor { impl fmt::Display for CrateFlavor {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(match *self { f.write_str(match *self {
CrateFlavor::Rlib => "rlib", CrateFlavor::Rlib => "rlib",
CrateFlavor::Dylib => "dylib" CrateFlavor::Dylib => "dylib",
}) })
} }
} }
@ -296,10 +296,10 @@ impl fmt::Display for CrateFlavor {
impl CratePaths { impl CratePaths {
fn paths(&self) -> Vec<PathBuf> { fn paths(&self) -> Vec<PathBuf> {
match (&self.dylib, &self.rlib) { match (&self.dylib, &self.rlib) {
(&None, &None) => vec!(), (&None, &None) => vec![],
(&Some(ref p), &None) | (&Some(ref p), &None) |
(&None, &Some(ref p)) => vec!(p.clone()), (&None, &Some(ref p)) => vec![p.clone()],
(&Some(ref p1), &Some(ref p2)) => vec!(p1.clone(), p2.clone()), (&Some(ref p1), &Some(ref p2)) => vec![p1.clone(), p2.clone()],
} }
} }
} }
@ -316,53 +316,72 @@ impl<'a> Context<'a> {
pub fn report_errs(&mut self) -> ! { pub fn report_errs(&mut self) -> ! {
let add = match self.root { let add = match self.root {
&None => String::new(), &None => String::new(),
&Some(ref r) => format!(" which `{}` depends on", &Some(ref r) => format!(" which `{}` depends on", r.ident),
r.ident)
}; };
let mut err = if !self.rejected_via_hash.is_empty() { let mut err = if !self.rejected_via_hash.is_empty() {
struct_span_err!(self.sess, self.span, E0460, struct_span_err!(self.sess,
self.span,
E0460,
"found possibly newer version of crate `{}`{}", "found possibly newer version of crate `{}`{}",
self.ident, add) self.ident,
add)
} else if !self.rejected_via_triple.is_empty() { } else if !self.rejected_via_triple.is_empty() {
struct_span_err!(self.sess, self.span, E0461, struct_span_err!(self.sess,
self.span,
E0461,
"couldn't find crate `{}` with expected target triple {}{}", "couldn't find crate `{}` with expected target triple {}{}",
self.ident, self.triple, add) self.ident,
self.triple,
add)
} else if !self.rejected_via_kind.is_empty() { } else if !self.rejected_via_kind.is_empty() {
struct_span_err!(self.sess, self.span, E0462, struct_span_err!(self.sess,
self.span,
E0462,
"found staticlib `{}` instead of rlib or dylib{}", "found staticlib `{}` instead of rlib or dylib{}",
self.ident, add) self.ident,
add)
} else if !self.rejected_via_version.is_empty() { } else if !self.rejected_via_version.is_empty() {
struct_span_err!(self.sess, self.span, E0514, struct_span_err!(self.sess,
self.span,
E0514,
"found crate `{}` compiled by an incompatible version of rustc{}", "found crate `{}` compiled by an incompatible version of rustc{}",
self.ident, add) self.ident,
add)
} else { } else {
let mut err = struct_span_err!(self.sess, self.span, E0463, let mut err = struct_span_err!(self.sess,
self.span,
E0463,
"can't find crate for `{}`{}", "can't find crate for `{}`{}",
self.ident, add); self.ident,
add);
err.span_label(self.span, &format!("can't find crate")); err.span_label(self.span, &format!("can't find crate"));
err err
}; };
if !self.rejected_via_triple.is_empty() { if !self.rejected_via_triple.is_empty() {
let mismatches = self.rejected_via_triple.iter(); let mismatches = self.rejected_via_triple.iter();
for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() {
err.note(&format!("crate `{}`, path #{}, triple {}: {}", err.note(&format!("crate `{}`, path #{}, triple {}: {}",
self.ident, i+1, got, path.display())); self.ident,
i + 1,
got,
path.display()));
} }
} }
if !self.rejected_via_hash.is_empty() { if !self.rejected_via_hash.is_empty() {
err.note("perhaps that crate needs to be recompiled?"); err.note("perhaps that crate needs to be recompiled?");
let mismatches = self.rejected_via_hash.iter(); let mismatches = self.rejected_via_hash.iter();
for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { for (i, &CrateMismatch { ref path, .. }) in mismatches.enumerate() {
err.note(&format!("crate `{}` path #{}: {}", err.note(&format!("crate `{}` path #{}: {}", self.ident, i + 1, path.display()));
self.ident, i+1, path.display()));
} }
match self.root { match self.root {
&None => {} &None => {}
&Some(ref r) => { &Some(ref r) => {
for (i, path) in r.paths().iter().enumerate() { for (i, path) in r.paths().iter().enumerate() {
err.note(&format!("crate `{}` path #{}: {}", err.note(&format!("crate `{}` path #{}: {}",
r.ident, i+1, path.display())); r.ident,
i + 1,
path.display()));
} }
} }
} }
@ -371,8 +390,7 @@ impl<'a> Context<'a> {
err.help("please recompile that crate using --crate-type lib"); err.help("please recompile that crate using --crate-type lib");
let mismatches = self.rejected_via_kind.iter(); let mismatches = self.rejected_via_kind.iter();
for (i, &CrateMismatch { ref path, .. }) in mismatches.enumerate() { for (i, &CrateMismatch { ref path, .. }) in mismatches.enumerate() {
err.note(&format!("crate `{}` path #{}: {}", err.note(&format!("crate `{}` path #{}: {}", self.ident, i + 1, path.display()));
self.ident, i+1, path.display()));
} }
} }
if !self.rejected_via_version.is_empty() { if !self.rejected_via_version.is_empty() {
@ -381,7 +399,10 @@ impl<'a> Context<'a> {
let mismatches = self.rejected_via_version.iter(); let mismatches = self.rejected_via_version.iter();
for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() { for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() {
err.note(&format!("crate `{}` path #{}: {} compiled by {:?}", err.note(&format!("crate `{}` path #{}: {} compiled by {:?}",
self.ident, i+1, path.display(), got)); self.ident,
i + 1,
path.display(),
got));
} }
} }
@ -410,7 +431,7 @@ impl<'a> Context<'a> {
let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name); let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name);
let mut candidates = FnvHashMap(); let mut candidates = FnvHashMap();
let mut staticlibs = vec!(); let mut staticlibs = vec![];
// First, find all possible candidate rlibs and dylibs purely based on // First, find all possible candidate rlibs and dylibs purely based on
// the name of the files themselves. We're trying to match against an // the name of the files themselves. We're trying to match against an
@ -430,38 +451,36 @@ impl<'a> Context<'a> {
None => return FileDoesntMatch, None => return FileDoesntMatch,
Some(file) => file, Some(file) => file,
}; };
let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) && let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") {
file.ends_with(".rlib") { (&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], true)
(&file[(rlib_prefix.len()) .. (file.len() - ".rlib".len())],
true)
} else if file.starts_with(&dylib_prefix) && } else if file.starts_with(&dylib_prefix) &&
file.ends_with(&dypair.1) { file.ends_with(&dypair.1) {
(&file[(dylib_prefix.len()) .. (file.len() - dypair.1.len())], (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], false)
false)
} else { } else {
if file.starts_with(&staticlib_prefix[..]) && if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) {
file.ends_with(&staticpair.1) {
staticlibs.push(CrateMismatch { staticlibs.push(CrateMismatch {
path: path.to_path_buf(), path: path.to_path_buf(),
got: "static".to_string() got: "static".to_string(),
}); });
} }
return FileDoesntMatch return FileDoesntMatch;
}; };
info!("lib candidate: {}", path.display()); info!("lib candidate: {}", path.display());
let hash_str = hash.to_string(); let hash_str = hash.to_string();
let slot = candidates.entry(hash_str) let slot = candidates.entry(hash_str)
.or_insert_with(|| (FnvHashMap(), FnvHashMap())); .or_insert_with(|| (FnvHashMap(), FnvHashMap()));
let (ref mut rlibs, ref mut dylibs) = *slot; let (ref mut rlibs, ref mut dylibs) = *slot;
fs::canonicalize(path).map(|p| { fs::canonicalize(path)
if rlib { .map(|p| {
rlibs.insert(p, kind); if rlib {
} else { rlibs.insert(p, kind);
dylibs.insert(p, kind); } else {
} dylibs.insert(p, kind);
FileMatches }
}).unwrap_or(FileDoesntMatch) FileMatches
})
.unwrap_or(FileDoesntMatch)
}); });
self.rejected_via_kind.extend(staticlibs); self.rejected_via_kind.extend(staticlibs);
@ -479,11 +498,12 @@ impl<'a> Context<'a> {
let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot);
let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot);
if let Some((h, m)) = slot { if let Some((h, m)) = slot {
libraries.insert(h, Library { libraries.insert(h,
dylib: dylib, Library {
rlib: rlib, dylib: dylib,
metadata: m, rlib: rlib,
}); metadata: m,
});
} }
} }
@ -494,7 +514,9 @@ impl<'a> Context<'a> {
0 => None, 0 => None,
1 => Some(libraries.into_iter().next().unwrap().1), 1 => Some(libraries.into_iter().next().unwrap().1),
_ => { _ => {
let mut err = struct_span_err!(self.sess, self.span, E0464, let mut err = struct_span_err!(self.sess,
self.span,
E0464,
"multiple matching crates for `{}`", "multiple matching crates for `{}`",
self.crate_name); self.crate_name);
err.note("candidates:"); err.note("candidates:");
@ -521,8 +543,11 @@ impl<'a> Context<'a> {
// read the metadata from it if `*slot` is `None`. If the metadata couldn't // read the metadata from it if `*slot` is `None`. If the metadata couldn't
// be read, it is assumed that the file isn't a valid rust library (no // be read, it is assumed that the file isn't a valid rust library (no
// errors are emitted). // errors are emitted).
fn extract_one(&mut self, m: FnvHashMap<PathBuf, PathKind>, flavor: CrateFlavor, fn extract_one(&mut self,
slot: &mut Option<(Svh, MetadataBlob)>) -> Option<(PathBuf, PathKind)> { m: FnvHashMap<PathBuf, PathKind>,
flavor: CrateFlavor,
slot: &mut Option<(Svh, MetadataBlob)>)
-> Option<(PathBuf, PathKind)> {
let mut ret: Option<(PathBuf, PathKind)> = None; let mut ret: Option<(PathBuf, PathKind)> = None;
let mut error = 0; let mut error = 0;
@ -532,9 +557,9 @@ impl<'a> Context<'a> {
// read both, but reading dylib metadata is quite // read both, but reading dylib metadata is quite
// slow. // slow.
if m.is_empty() { if m.is_empty() {
return None return None;
} else if m.len() == 1 { } else if m.len() == 1 {
return Some(m.into_iter().next().unwrap()) return Some(m.into_iter().next().unwrap());
} }
} }
@ -547,23 +572,28 @@ impl<'a> Context<'a> {
(h, blob) (h, blob)
} else { } else {
info!("metadata mismatch"); info!("metadata mismatch");
continue continue;
} }
} }
Err(err) => { Err(err) => {
info!("no metadata found: {}", err); info!("no metadata found: {}", err);
continue continue;
} }
}; };
// If we see multiple hashes, emit an error about duplicate candidates. // If we see multiple hashes, emit an error about duplicate candidates.
if slot.as_ref().map_or(false, |s| s.0 != hash) { if slot.as_ref().map_or(false, |s| s.0 != hash) {
let mut e = struct_span_err!(self.sess, self.span, E0465, let mut e = struct_span_err!(self.sess,
self.span,
E0465,
"multiple {} candidates for `{}` found", "multiple {} candidates for `{}` found",
flavor, self.crate_name); flavor,
self.crate_name);
e.span_note(self.span, e.span_note(self.span,
&format!(r"candidate #1: {}", &format!(r"candidate #1: {}",
ret.as_ref().unwrap().0 ret.as_ref()
.display())); .unwrap()
.0
.display()));
if let Some(ref mut e) = err { if let Some(ref mut e) = err {
e.emit(); e.emit();
} }
@ -574,9 +604,10 @@ impl<'a> Context<'a> {
if error > 0 { if error > 0 {
error += 1; error += 1;
err.as_mut().unwrap().span_note(self.span, err.as_mut().unwrap().span_note(self.span,
&format!(r"candidate #{}: {}", error, &format!(r"candidate #{}: {}",
error,
lib.display())); lib.display()));
continue continue;
} }
*slot = Some((hash, metadata)); *slot = Some((hash, metadata));
ret = Some((lib, kind)); ret = Some((lib, kind));
@ -595,37 +626,39 @@ impl<'a> Context<'a> {
let rustc_version = rustc_version(); let rustc_version = rustc_version();
if root.rustc_version != rustc_version { if root.rustc_version != rustc_version {
info!("Rejecting via version: expected {} got {}", info!("Rejecting via version: expected {} got {}",
rustc_version, root.rustc_version); rustc_version,
root.rustc_version);
self.rejected_via_version.push(CrateMismatch { self.rejected_via_version.push(CrateMismatch {
path: libpath.to_path_buf(), path: libpath.to_path_buf(),
got: root.rustc_version got: root.rustc_version,
}); });
return None; return None;
} }
if self.should_match_name { if self.should_match_name {
if self.crate_name != root.name { if self.crate_name != root.name {
info!("Rejecting via crate name"); return None; info!("Rejecting via crate name");
return None;
} }
} }
if root.triple != self.triple { if root.triple != self.triple {
info!("Rejecting via crate triple: expected {} got {}", info!("Rejecting via crate triple: expected {} got {}",
self.triple, root.triple); self.triple,
root.triple);
self.rejected_via_triple.push(CrateMismatch { self.rejected_via_triple.push(CrateMismatch {
path: libpath.to_path_buf(), path: libpath.to_path_buf(),
got: root.triple got: root.triple,
}); });
return None; return None;
} }
if let Some(myhash) = self.hash { if let Some(myhash) = self.hash {
if *myhash != root.hash { if *myhash != root.hash {
info!("Rejecting via hash: expected {} got {}", info!("Rejecting via hash: expected {} got {}", *myhash, root.hash);
*myhash, root.hash);
self.rejected_via_hash.push(CrateMismatch { self.rejected_via_hash.push(CrateMismatch {
path: libpath.to_path_buf(), path: libpath.to_path_buf(),
got: myhash.to_string() got: myhash.to_string(),
}); });
return None; return None;
} }
@ -649,8 +682,8 @@ impl<'a> Context<'a> {
(t.options.staticlib_prefix.clone(), t.options.staticlib_suffix.clone()) (t.options.staticlib_prefix.clone(), t.options.staticlib_suffix.clone())
} }
fn find_commandline_library<'b, LOCS> (&mut self, locs: LOCS) -> Option<Library> fn find_commandline_library<'b, LOCS>(&mut self, locs: LOCS) -> Option<Library>
where LOCS: Iterator<Item=&'b String> where LOCS: Iterator<Item = &'b String>
{ {
// First, filter out all libraries that look suspicious. We only accept // First, filter out all libraries that look suspicious. We only accept
// files which actually exist that have the correct naming scheme for // files which actually exist that have the correct naming scheme for
@ -663,30 +696,33 @@ impl<'a> Context<'a> {
let locs = locs.map(|l| PathBuf::from(l)).filter(|loc| { let locs = locs.map(|l| PathBuf::from(l)).filter(|loc| {
if !loc.exists() { if !loc.exists() {
sess.err(&format!("extern location for {} does not exist: {}", sess.err(&format!("extern location for {} does not exist: {}",
self.crate_name, loc.display())); self.crate_name,
loc.display()));
return false; return false;
} }
let file = match loc.file_name().and_then(|s| s.to_str()) { let file = match loc.file_name().and_then(|s| s.to_str()) {
Some(file) => file, Some(file) => file,
None => { None => {
sess.err(&format!("extern location for {} is not a file: {}", sess.err(&format!("extern location for {} is not a file: {}",
self.crate_name, loc.display())); self.crate_name,
loc.display()));
return false; return false;
} }
}; };
if file.starts_with("lib") && file.ends_with(".rlib") { if file.starts_with("lib") && file.ends_with(".rlib") {
return true return true;
} else { } else {
let (ref prefix, ref suffix) = dylibname; let (ref prefix, ref suffix) = dylibname;
if file.starts_with(&prefix[..]) && if file.starts_with(&prefix[..]) && file.ends_with(&suffix[..]) {
file.ends_with(&suffix[..]) { return true;
return true
} }
} }
sess.struct_err(&format!("extern location for {} is of an unknown type: {}", sess.struct_err(&format!("extern location for {} is of an unknown type: {}",
self.crate_name, loc.display())) self.crate_name,
loc.display()))
.help(&format!("file name should be lib*.rlib or {}*.{}", .help(&format!("file name should be lib*.rlib or {}*.{}",
dylibname.0, dylibname.1)) dylibname.0,
dylibname.1))
.emit(); .emit();
false false
}); });
@ -695,11 +731,9 @@ impl<'a> Context<'a> {
// there's at most one rlib and at most one dylib. // there's at most one rlib and at most one dylib.
for loc in locs { for loc in locs {
if loc.file_name().unwrap().to_str().unwrap().ends_with(".rlib") { if loc.file_name().unwrap().to_str().unwrap().ends_with(".rlib") {
rlibs.insert(fs::canonicalize(&loc).unwrap(), rlibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag);
PathKind::ExternFlag);
} else { } else {
dylibs.insert(fs::canonicalize(&loc).unwrap(), dylibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag);
PathKind::ExternFlag);
} }
} }
}; };
@ -709,13 +743,17 @@ impl<'a> Context<'a> {
let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot);
let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot);
if rlib.is_none() && dylib.is_none() { return None } if rlib.is_none() && dylib.is_none() {
return None;
}
match slot { match slot {
Some((_, metadata)) => Some(Library { Some((_, metadata)) => {
dylib: dylib, Some(Library {
rlib: rlib, dylib: dylib,
metadata: metadata, rlib: rlib,
}), metadata: metadata,
})
}
None => None, None => None,
} }
} }
@ -728,9 +766,9 @@ pub fn note_crate_name(err: &mut DiagnosticBuilder, name: &str) {
impl ArchiveMetadata { impl ArchiveMetadata {
fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> { fn new(ar: ArchiveRO) -> Option<ArchiveMetadata> {
let data = { let data = {
let section = ar.iter().filter_map(|s| s.ok()).find(|sect| { let section = ar.iter()
sect.name() == Some(METADATA_FILENAME) .filter_map(|s| s.ok())
}); .find(|sect| sect.name() == Some(METADATA_FILENAME));
match section { match section {
Some(s) => s.data() as *const [u8], Some(s) => s.data() as *const [u8],
None => { None => {
@ -746,12 +784,14 @@ impl ArchiveMetadata {
}) })
} }
pub fn as_slice<'a>(&'a self) -> &'a [u8] { unsafe { &*self.data } } pub fn as_slice<'a>(&'a self) -> &'a [u8] {
unsafe { &*self.data }
}
} }
fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path) fn verify_decompressed_encoding_version(blob: &MetadataBlob,
-> Result<(), String> filename: &Path)
{ -> Result<(), String> {
if !blob.is_compatible() { if !blob.is_compatible() {
Err((format!("incompatible metadata version found: '{}'", Err((format!("incompatible metadata version found: '{}'",
filename.display()))) filename.display())))
@ -761,16 +801,21 @@ fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path)
} }
// Just a small wrapper to time how long reading metadata takes. // Just a small wrapper to time how long reading metadata takes.
fn get_metadata_section(target: &Target, flavor: CrateFlavor, filename: &Path) fn get_metadata_section(target: &Target,
flavor: CrateFlavor,
filename: &Path)
-> Result<MetadataBlob, String> { -> Result<MetadataBlob, String> {
let start = Instant::now(); let start = Instant::now();
let ret = get_metadata_section_imp(target, flavor, filename); let ret = get_metadata_section_imp(target, flavor, filename);
info!("reading {:?} => {:?}", filename.file_name().unwrap(), info!("reading {:?} => {:?}",
filename.file_name().unwrap(),
start.elapsed()); start.elapsed());
return ret return ret;
} }
fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Path) fn get_metadata_section_imp(target: &Target,
flavor: CrateFlavor,
filename: &Path)
-> Result<MetadataBlob, String> { -> Result<MetadataBlob, String> {
if !filename.exists() { if !filename.exists() {
return Err(format!("no such file: '{}'", filename.display())); return Err(format!("no such file: '{}'", filename.display()));
@ -783,13 +828,11 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat
Some(ar) => ar, Some(ar) => ar,
None => { None => {
debug!("llvm didn't like `{}`", filename.display()); debug!("llvm didn't like `{}`", filename.display());
return Err(format!("failed to read rlib metadata: '{}'", return Err(format!("failed to read rlib metadata: '{}'", filename.display()));
filename.display()));
} }
}; };
return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) { return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) {
None => Err(format!("failed to read rlib metadata: '{}'", None => Err(format!("failed to read rlib metadata: '{}'", filename.display())),
filename.display())),
Some(blob) => { Some(blob) => {
verify_decompressed_encoding_version(&blob, filename)?; verify_decompressed_encoding_version(&blob, filename)?;
Ok(blob) Ok(blob)
@ -800,22 +843,19 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat
let buf = common::path2cstr(filename); let buf = common::path2cstr(filename);
let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr()); let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr());
if mb as isize == 0 { if mb as isize == 0 {
return Err(format!("error reading library: '{}'", return Err(format!("error reading library: '{}'", filename.display()));
filename.display()))
} }
let of = match ObjectFile::new(mb) { let of = match ObjectFile::new(mb) {
Some(of) => of, Some(of) => of,
_ => { _ => {
return Err((format!("provided path not an object file: '{}'", return Err((format!("provided path not an object file: '{}'", filename.display())))
filename.display())))
} }
}; };
let si = mk_section_iter(of.llof); let si = mk_section_iter(of.llof);
while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False { while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False {
let mut name_buf = ptr::null(); let mut name_buf = ptr::null();
let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf); let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf);
let name = slice::from_raw_parts(name_buf as *const u8, let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec();
name_len as usize).to_vec();
let name = String::from_utf8(name).unwrap(); let name = String::from_utf8(name).unwrap();
debug!("get_metadata_section: name {}", name); debug!("get_metadata_section: name {}", name);
if read_meta_section_name(target) == name { if read_meta_section_name(target) == name {
@ -823,8 +863,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat
let csz = llvm::LLVMGetSectionSize(si.llsi) as usize; let csz = llvm::LLVMGetSectionSize(si.llsi) as usize;
let cvbuf: *const u8 = cbuf as *const u8; let cvbuf: *const u8 = cbuf as *const u8;
let vlen = METADATA_HEADER.len(); let vlen = METADATA_HEADER.len();
debug!("checking {} bytes of metadata-version stamp", debug!("checking {} bytes of metadata-version stamp", vlen);
vlen);
let minsz = cmp::min(vlen, csz); let minsz = cmp::min(vlen, csz);
let buf0 = slice::from_raw_parts(cvbuf, minsz); let buf0 = slice::from_raw_parts(cvbuf, minsz);
let version_ok = buf0 == METADATA_HEADER; let version_ok = buf0 == METADATA_HEADER;
@ -834,8 +873,7 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat
} }
let cvbuf1 = cvbuf.offset(vlen as isize); let cvbuf1 = cvbuf.offset(vlen as isize);
debug!("inflating {} bytes of compressed metadata", debug!("inflating {} bytes of compressed metadata", csz - vlen);
csz - vlen);
let bytes = slice::from_raw_parts(cvbuf1, csz - vlen); let bytes = slice::from_raw_parts(cvbuf1, csz - vlen);
match flate::inflate_bytes(bytes) { match flate::inflate_bytes(bytes) {
Ok(inflated) => { Ok(inflated) => {
@ -879,14 +917,15 @@ pub fn read_meta_section_name(_target: &Target) -> &'static str {
} }
// A diagnostic function for dumping crate metadata to an output stream // A diagnostic function for dumping crate metadata to an output stream
pub fn list_file_metadata(target: &Target, path: &Path, pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> {
out: &mut io::Write) -> io::Result<()> {
let filename = path.file_name().unwrap().to_str().unwrap(); let filename = path.file_name().unwrap().to_str().unwrap();
let flavor = if filename.ends_with(".rlib") { CrateFlavor::Rlib } else { CrateFlavor::Dylib }; let flavor = if filename.ends_with(".rlib") {
CrateFlavor::Rlib
} else {
CrateFlavor::Dylib
};
match get_metadata_section(target, flavor, path) { match get_metadata_section(target, flavor, path) {
Ok(metadata) => metadata.list_crate_metadata(out), Ok(metadata) => metadata.list_crate_metadata(out),
Err(msg) => { Err(msg) => write!(out, "{}\n", msg),
write!(out, "{}\n", msg)
}
} }
} }
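
list_file_metadata accepts any &mut io::Write, so the same listing can go to stdout, a file, or an in-memory buffer. A small self-contained illustration of that writer-parameter style; dump is hypothetical, and the real entry point additionally needs a Target and a path:

    use std::io::{self, Write};

    // Hypothetical stand-in for a routine that reports into any writer,
    // the way list_file_metadata takes an `out` writer.
    fn dump(name: &str, out: &mut dyn Write) -> io::Result<()> {
        write!(out, "{}\n", name)
    }

    fn main() -> io::Result<()> {
        // To stdout...
        dump("some_crate.rlib", &mut io::stdout())?;
        // ...or into a buffer, e.g. for testing.
        let mut buf: Vec<u8> = Vec::new();
        dump("some_crate.rlib", &mut buf)?;
        assert_eq!(buf, b"some_crate.rlib\n".to_vec());
        Ok(())
    }
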
View File
@ -27,7 +27,8 @@ use syntax_pos::{self, Span};
use std::marker::PhantomData; use std::marker::PhantomData;
pub fn rustc_version() -> String { pub fn rustc_version() -> String {
format!("rustc {}", option_env!("CFG_VERSION").unwrap_or("unknown version")) format!("rustc {}",
option_env!("CFG_VERSION").unwrap_or("unknown version"))
} }
/// Metadata encoding version. /// Metadata encoding version.
@ -41,11 +42,8 @@ pub const METADATA_VERSION: u8 = 3;
/// as a length of 0 by old compilers. /// as a length of 0 by old compilers.
/// ///
/// This header is followed by the position of the `CrateRoot`. /// This header is followed by the position of the `CrateRoot`.
pub const METADATA_HEADER: &'static [u8; 12] = &[ pub const METADATA_HEADER: &'static [u8; 12] =
0, 0, 0, 0, &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];
b'r', b'u', b's', b't',
0, 0, 0, METADATA_VERSION
];
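
Per the doc comment, a well-formed blob begins with this 12-byte header, whose final byte carries the format version, and the CrateRoot position follows it. A hedged sketch of how a reader might validate such a prefix; read_version is illustrative only, not the real decoder:

    const METADATA_VERSION: u8 = 3;
    const METADATA_HEADER: &'static [u8; 12] =
        &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION];

    // Illustrative check only: accept a blob whose first bytes match the header
    // (ignoring the trailing version byte) and report the version it carries.
    fn read_version(blob: &[u8]) -> Option<u8> {
        if blob.len() < METADATA_HEADER.len() {
            return None;
        }
        if blob[..METADATA_HEADER.len() - 1] != METADATA_HEADER[..METADATA_HEADER.len() - 1] {
            return None;
        }
        Some(blob[METADATA_HEADER.len() - 1])
    }

    fn main() {
        let mut blob = METADATA_HEADER.to_vec();
        blob.extend_from_slice(&[0, 0, 0, 42]); // followed by the CrateRoot position
        assert_eq!(read_version(&blob), Some(METADATA_VERSION));
        assert_eq!(read_version(&[1, 2, 3]), None);
    }

The real compatibility check may well compare all twelve bytes at once; splitting out the version byte here is only for illustration.
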
/// The shorthand encoding uses an enum's variant index `usize` /// The shorthand encoding uses an enum's variant index `usize`
/// and is offset by this value so it never matches a real variant. /// and is offset by this value so it never matches a real variant.
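
The offset named in the hunk header just below (0x80) keeps genuine variant indices and shorthand references in disjoint ranges, so a decoder can tell them apart with a single comparison. A toy model of that disambiguation, with made-up names; the real decoders work on the metadata byte stream rather than an enum:

    const SHORTHAND_OFFSET: usize = 0x80;

    // Toy model: values below the offset are ordinary enum variant indices,
    // values at or above it mean "reuse the value previously written at
    // position (tag - SHORTHAND_OFFSET)".
    enum Tag {
        Variant(usize),
        Shorthand { position: usize },
    }

    fn classify(tag: usize) -> Tag {
        if tag < SHORTHAND_OFFSET {
            Tag::Variant(tag)
        } else {
            Tag::Shorthand { position: tag - SHORTHAND_OFFSET }
        }
    }

    fn main() {
        assert!(matches!(classify(5), Tag::Variant(5)));
        assert!(matches!(classify(0x80 + 300), Tag::Shorthand { position: 300 }));
    }
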
@ -70,14 +68,14 @@ pub const SHORTHAND_OFFSET: usize = 0x80;
#[must_use] #[must_use]
pub struct Lazy<T> { pub struct Lazy<T> {
pub position: usize, pub position: usize,
_marker: PhantomData<T> _marker: PhantomData<T>,
} }
impl<T> Lazy<T> { impl<T> Lazy<T> {
pub fn with_position(position: usize) -> Lazy<T> { pub fn with_position(position: usize) -> Lazy<T> {
Lazy { Lazy {
position: position, position: position,
_marker: PhantomData _marker: PhantomData,
} }
} }
@ -90,7 +88,9 @@ impl<T> Lazy<T> {
impl<T> Copy for Lazy<T> {} impl<T> Copy for Lazy<T> {}
impl<T> Clone for Lazy<T> { impl<T> Clone for Lazy<T> {
fn clone(&self) -> Self { *self } fn clone(&self) -> Self {
*self
}
} }
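
Lazy<T> stores only a byte position plus a PhantomData<T>, so it is copyable no matter what T is; presumably Copy and Clone are written by hand because a derive would impose an unnecessary T: Clone bound. The same pattern standalone, with a hypothetical Marker<T> wrapper (LazySeq<T> below follows suit):

    use std::marker::PhantomData;

    // Hypothetical wrapper mirroring the shape of Lazy<T>: only a position is stored.
    struct Marker<T> {
        position: usize,
        _marker: PhantomData<T>,
    }

    // Manual impls avoid the `T: Copy` / `T: Clone` bounds a derive would add.
    impl<T> Copy for Marker<T> {}
    impl<T> Clone for Marker<T> {
        fn clone(&self) -> Self {
            *self
        }
    }

    // A type that is itself neither Copy nor Clone.
    struct NotClone;

    fn main() {
        let a: Marker<NotClone> = Marker { position: 7, _marker: PhantomData };
        let b = a; // Copy works even though NotClone is not Copy.
        assert_eq!(a.position, b.position);
    }
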
impl<T> serialize::UseSpecializedEncodable for Lazy<T> {} impl<T> serialize::UseSpecializedEncodable for Lazy<T> {}
@ -112,7 +112,7 @@ impl<T> serialize::UseSpecializedDecodable for Lazy<T> {}
pub struct LazySeq<T> { pub struct LazySeq<T> {
pub len: usize, pub len: usize,
pub position: usize, pub position: usize,
_marker: PhantomData<T> _marker: PhantomData<T>,
} }
impl<T> LazySeq<T> { impl<T> LazySeq<T> {
@ -124,7 +124,7 @@ impl<T> LazySeq<T> {
LazySeq { LazySeq {
len: len, len: len,
position: position, position: position,
_marker: PhantomData _marker: PhantomData,
} }
} }
@ -136,7 +136,9 @@ impl<T> LazySeq<T> {
impl<T> Copy for LazySeq<T> {} impl<T> Copy for LazySeq<T> {}
impl<T> Clone for LazySeq<T> { impl<T> Clone for LazySeq<T> {
fn clone(&self) -> Self { *self } fn clone(&self) -> Self {
*self
}
} }
impl<T> serialize::UseSpecializedEncodable for LazySeq<T> {} impl<T> serialize::UseSpecializedEncodable for LazySeq<T> {}
@ -155,7 +157,7 @@ pub enum LazyState {
/// Inside a metadata node, with a previous `Lazy` or `LazySeq`. /// Inside a metadata node, with a previous `Lazy` or `LazySeq`.
/// The position is a conservative estimate of where that /// The position is a conservative estimate of where that
/// previous `Lazy` / `LazySeq` would end (see their comments). /// previous `Lazy` / `LazySeq` would end (see their comments).
Previous(usize) Previous(usize),
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
@ -185,13 +187,13 @@ pub struct CrateRoot {
pub struct CrateDep { pub struct CrateDep {
pub name: ast::Name, pub name: ast::Name,
pub hash: hir::svh::Svh, pub hash: hir::svh::Svh,
pub explicitly_linked: bool pub explicitly_linked: bool,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
pub struct TraitImpls { pub struct TraitImpls {
pub trait_id: (u32, DefIndex), pub trait_id: (u32, DefIndex),
pub impls: LazySeq<DefIndex> pub impls: LazySeq<DefIndex>,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
@ -199,7 +201,7 @@ pub struct MacroDef {
pub name: ast::Name, pub name: ast::Name,
pub attrs: Vec<ast::Attribute>, pub attrs: Vec<ast::Attribute>,
pub span: Span, pub span: Span,
pub body: String pub body: String,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
@ -219,7 +221,7 @@ pub struct Entry<'tcx> {
pub predicates: Option<Lazy<ty::GenericPredicates<'tcx>>>, pub predicates: Option<Lazy<ty::GenericPredicates<'tcx>>>,
pub ast: Option<Lazy<astencode::Ast<'tcx>>>, pub ast: Option<Lazy<astencode::Ast<'tcx>>>,
pub mir: Option<Lazy<mir::repr::Mir<'tcx>>> pub mir: Option<Lazy<mir::repr::Mir<'tcx>>>,
} }
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] #[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
@ -245,18 +247,18 @@ pub enum EntryKind<'tcx> {
DefaultImpl(Lazy<ImplData<'tcx>>), DefaultImpl(Lazy<ImplData<'tcx>>),
Method(Lazy<MethodData<'tcx>>), Method(Lazy<MethodData<'tcx>>),
AssociatedType(AssociatedContainer), AssociatedType(AssociatedContainer),
AssociatedConst(AssociatedContainer) AssociatedConst(AssociatedContainer),
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
pub struct ModData { pub struct ModData {
pub reexports: LazySeq<def::Export> pub reexports: LazySeq<def::Export>,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
pub struct FnData { pub struct FnData {
pub constness: hir::Constness, pub constness: hir::Constness,
pub arg_names: LazySeq<ast::Name> pub arg_names: LazySeq<ast::Name>,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
@ -266,7 +268,7 @@ pub struct VariantData {
/// If this is a struct's only variant, this /// If this is a struct's only variant, this
/// is the index of the "struct ctor" item. /// is the index of the "struct ctor" item.
pub struct_ctor: Option<DefIndex> pub struct_ctor: Option<DefIndex>,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
@ -275,7 +277,7 @@ pub struct TraitData<'tcx> {
pub paren_sugar: bool, pub paren_sugar: bool,
pub has_default_impl: bool, pub has_default_impl: bool,
pub trait_ref: Lazy<ty::TraitRef<'tcx>>, pub trait_ref: Lazy<ty::TraitRef<'tcx>>,
pub super_predicates: Lazy<ty::GenericPredicates<'tcx>> pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
@ -283,7 +285,7 @@ pub struct ImplData<'tcx> {
pub polarity: hir::ImplPolarity, pub polarity: hir::ImplPolarity,
pub parent_impl: Option<DefId>, pub parent_impl: Option<DefId>,
pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>, pub coerce_unsized_kind: Option<ty::adjustment::CustomCoerceUnsized>,
pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>> pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>,
} }
/// Describes whether the container of an associated item /// Describes whether the container of an associated item
@ -294,21 +296,17 @@ pub enum AssociatedContainer {
TraitRequired, TraitRequired,
TraitWithDefault, TraitWithDefault,
ImplDefault, ImplDefault,
ImplFinal ImplFinal,
} }
impl AssociatedContainer { impl AssociatedContainer {
pub fn with_def_id(&self, def_id: DefId) -> ty::ImplOrTraitItemContainer { pub fn with_def_id(&self, def_id: DefId) -> ty::ImplOrTraitItemContainer {
match *self { match *self {
AssociatedContainer::TraitRequired | AssociatedContainer::TraitRequired |
AssociatedContainer::TraitWithDefault => { AssociatedContainer::TraitWithDefault => ty::TraitContainer(def_id),
ty::TraitContainer(def_id)
}
AssociatedContainer::ImplDefault | AssociatedContainer::ImplDefault |
AssociatedContainer::ImplFinal => { AssociatedContainer::ImplFinal => ty::ImplContainer(def_id),
ty::ImplContainer(def_id)
}
} }
} }
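
with_def_id collapses the four container variants into two kinds: both Trait* variants become a trait container and both Impl* variants an impl container, each carrying the given DefId. A self-contained toy mirror of that mapping, with simple stand-ins for rustc's DefId and ty:: container types:

    // Simplified stand-ins for rustc's DefId and container types.
    #[derive(Clone, Copy, PartialEq, Debug)]
    struct DefId(u32);

    #[derive(PartialEq, Debug)]
    enum Container {
        Trait(DefId),
        Impl(DefId),
    }

    enum AssociatedContainer {
        TraitRequired,
        TraitWithDefault,
        ImplDefault,
        ImplFinal,
    }

    impl AssociatedContainer {
        fn with_def_id(&self, def_id: DefId) -> Container {
            match *self {
                AssociatedContainer::TraitRequired |
                AssociatedContainer::TraitWithDefault => Container::Trait(def_id),
                AssociatedContainer::ImplDefault |
                AssociatedContainer::ImplFinal => Container::Impl(def_id),
            }
        }
    }

    fn main() {
        let id = DefId(1);
        assert_eq!(AssociatedContainer::TraitWithDefault.with_def_id(id), Container::Trait(id));
        assert_eq!(AssociatedContainer::ImplFinal.with_def_id(id), Container::Impl(id));
    }
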
@ -318,7 +316,7 @@ impl AssociatedContainer {
AssociatedContainer::TraitWithDefault | AssociatedContainer::TraitWithDefault |
AssociatedContainer::ImplDefault | AssociatedContainer::ImplDefault |
AssociatedContainer::ImplFinal => true AssociatedContainer::ImplFinal => true,
} }
} }
@ -328,7 +326,7 @@ impl AssociatedContainer {
AssociatedContainer::TraitWithDefault | AssociatedContainer::TraitWithDefault |
AssociatedContainer::ImplDefault => hir::Defaultness::Default, AssociatedContainer::ImplDefault => hir::Defaultness::Default,
AssociatedContainer::ImplFinal => hir::Defaultness::Final AssociatedContainer::ImplFinal => hir::Defaultness::Final,
} }
} }
} }
@ -337,11 +335,11 @@ impl AssociatedContainer {
pub struct MethodData<'tcx> { pub struct MethodData<'tcx> {
pub fn_data: FnData, pub fn_data: FnData,
pub container: AssociatedContainer, pub container: AssociatedContainer,
pub explicit_self: Lazy<ty::ExplicitSelfCategory<'tcx>> pub explicit_self: Lazy<ty::ExplicitSelfCategory<'tcx>>,
} }
#[derive(RustcEncodable, RustcDecodable)] #[derive(RustcEncodable, RustcDecodable)]
pub struct ClosureData<'tcx> { pub struct ClosureData<'tcx> {
pub kind: ty::ClosureKind, pub kind: ty::ClosureKind,
pub ty: Lazy<ty::ClosureTy<'tcx>> pub ty: Lazy<ty::ClosureTy<'tcx>>,
} }