rustc_metadata: remove ty{en,de}code and move to auto-derived serialization.
parent bcbb4107a1
commit 88c5679c4e
@ -25,7 +25,7 @@ use middle::stability;
|
||||
use ty::subst::Substs;
|
||||
use traits;
|
||||
use ty::{self, TraitRef, Ty, TypeAndMut};
|
||||
use ty::{TyS, TypeVariants};
|
||||
use ty::{TyS, TypeVariants, Slice};
|
||||
use ty::{AdtKind, AdtDef, ClosureSubsts, Region};
|
||||
use hir::FreevarMap;
|
||||
use ty::{BareFnTy, InferTy, ParamTy, ProjectionTy, TraitObject};
|
||||
@ -92,7 +92,7 @@ pub struct CtxtInterners<'tcx> {
|
||||
/// Specifically use a speedy hash algorithm for these hash sets,
|
||||
/// they're accessed quite often.
|
||||
type_: RefCell<FnvHashSet<Interned<'tcx, TyS<'tcx>>>>,
|
||||
type_list: RefCell<FnvHashSet<Interned<'tcx, [Ty<'tcx>]>>>,
|
||||
type_list: RefCell<FnvHashSet<Interned<'tcx, Slice<Ty<'tcx>>>>>,
|
||||
substs: RefCell<FnvHashSet<Interned<'tcx, Substs<'tcx>>>>,
|
||||
bare_fn: RefCell<FnvHashSet<Interned<'tcx, BareFnTy<'tcx>>>>,
|
||||
region: RefCell<FnvHashSet<Interned<'tcx, Region>>>,
|
||||
@ -847,10 +847,11 @@ impl<'a, 'tcx> Lift<'tcx> for &'a Region {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Lift<'tcx> for &'a [Ty<'a>] {
|
||||
type Lifted = &'tcx [Ty<'tcx>];
|
||||
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx [Ty<'tcx>]> {
|
||||
if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(*self) {
|
||||
impl<'a, 'tcx> Lift<'tcx> for &'a Slice<Ty<'a>> {
|
||||
type Lifted = &'tcx Slice<Ty<'tcx>>;
|
||||
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
|
||||
-> Option<&'tcx Slice<Ty<'tcx>>> {
|
||||
if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(&self[..]) {
|
||||
if *self as *const _ == list as *const _ {
|
||||
return Some(list);
|
||||
}
|
||||
@ -1067,9 +1068,24 @@ impl<'tcx: 'lcx, 'lcx> Borrow<TypeVariants<'lcx>> for Interned<'tcx, TyS<'tcx>>
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, [Ty<'tcx>]> {
// NB: An Interned<Slice<T>> compares and hashes as its elements.
impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice<T>> {
fn eq(&self, other: &Interned<'tcx, Slice<T>>) -> bool {
self.0[..] == other.0[..]
}
}

impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice<T>> {}

impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice<T>> {
fn hash<H: Hasher>(&self, s: &mut H) {
self.0[..].hash(s)
}
}

impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice<Ty<'tcx>>> {
fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] {
self.0
&self.0[..]
}
}

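Editor's aside, not part of the commit: the impls above make an `Interned<Slice<T>>` hash and compare as its elements, and the `Borrow<[Ty]>` impl is what lets the interner's `FnvHashSet` be probed with a plain `&[Ty]` before any slice has been interned. A minimal sketch of that standard-library `Borrow` lookup pattern, with a hypothetical `Key` wrapper standing in for `Interned<Slice<T>>`:

```rust
use std::borrow::Borrow;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

// Stand-in for Interned<Slice<T>>: compares and hashes as its elements.
struct Key(Box<[u32]>);

impl PartialEq for Key {
    fn eq(&self, other: &Key) -> bool {
        self.0[..] == other.0[..]
    }
}
impl Eq for Key {}

impl Hash for Key {
    fn hash<H: Hasher>(&self, s: &mut H) {
        // Must hash exactly like [u32] does, or slice lookups would miss.
        self.0[..].hash(s)
    }
}

// This is what allows probing the set with a plain &[u32].
impl Borrow<[u32]> for Key {
    fn borrow(&self) -> &[u32] {
        &self.0[..]
    }
}

fn main() {
    let mut set = HashSet::new();
    set.insert(Key(vec![1u32, 2, 3].into_boxed_slice()));

    // No Key (and, in the compiler, no interned Slice) is built just to look up.
    let hit: &[u32] = &[1, 2, 3];
    let miss: &[u32] = &[4, 5];
    assert!(set.contains(hit));
    assert!(!set.contains(miss));
}
```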
@ -1091,32 +1107,23 @@ impl<'tcx> Borrow<Region> for Interned<'tcx, Region> {
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! items { ($($item:item)+) => ($($item)+) }
|
||||
macro_rules! impl_interners {
|
||||
($lt_tcx:tt, $($name:ident: $method:ident($alloc:ty, $needs_infer:expr)-> $ty:ty),+) => {
|
||||
items!($(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.0 == other.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
|
||||
|
||||
impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
|
||||
fn hash<H: Hasher>(&self, s: &mut H) {
|
||||
self.0.hash(s)
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! intern_method {
|
||||
($lt_tcx:tt, $name:ident: $method:ident($alloc:ty,
|
||||
$alloc_to_key:expr,
|
||||
$alloc_to_ret:expr,
|
||||
$needs_infer:expr) -> $ty:ty) => {
|
||||
impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> {
|
||||
pub fn $method(self, v: $alloc) -> &$lt_tcx $ty {
|
||||
if let Some(i) = self.interners.$name.borrow().get::<$ty>(&v) {
|
||||
return i.0;
|
||||
}
|
||||
if !self.is_global() {
|
||||
if let Some(i) = self.global_interners.$name.borrow().get::<$ty>(&v) {
|
||||
{
|
||||
let key = ($alloc_to_key)(&v);
|
||||
if let Some(i) = self.interners.$name.borrow().get(key) {
|
||||
return i.0;
|
||||
}
|
||||
if !self.is_global() {
|
||||
if let Some(i) = self.global_interners.$name.borrow().get(key) {
|
||||
return i.0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// HACK(eddyb) Depend on flags being accurate to
|
||||
@ -1127,7 +1134,7 @@ macro_rules! impl_interners {
|
||||
let v = unsafe {
|
||||
mem::transmute(v)
|
||||
};
|
||||
let i = self.global_interners.arenas.$name.alloc(v);
|
||||
let i = ($alloc_to_ret)(self.global_interners.arenas.$name.alloc(v));
|
||||
self.global_interners.$name.borrow_mut().insert(Interned(i));
|
||||
return i;
|
||||
}
|
||||
@ -1141,11 +1148,31 @@ macro_rules! impl_interners {
|
||||
}
|
||||
}
|
||||
|
||||
let i = self.interners.arenas.$name.alloc(v);
|
||||
let i = ($alloc_to_ret)(self.interners.arenas.$name.alloc(v));
|
||||
self.interners.$name.borrow_mut().insert(Interned(i));
|
||||
i
|
||||
}
|
||||
})+);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! direct_interners {
|
||||
($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => {
|
||||
$(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.0 == other.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {}
|
||||
|
||||
impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> {
|
||||
fn hash<H: Hasher>(&self, s: &mut H) {
|
||||
self.0.hash(s)
|
||||
}
|
||||
}
|
||||
|
||||
intern_method!($lt_tcx, $name: $method($ty, |x| x, |x| x, $needs_infer) -> $ty);)+
|
||||
}
|
||||
}
|
||||
|
||||
@ -1153,15 +1180,14 @@ fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool {
|
||||
x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX)
|
||||
}
|
||||
|
||||
impl_interners!('tcx,
|
||||
type_list: mk_type_list(Vec<Ty<'tcx>>, keep_local) -> [Ty<'tcx>],
|
||||
substs: mk_substs(Substs<'tcx>, |substs: &Substs| {
|
||||
direct_interners!('tcx,
|
||||
substs: mk_substs(|substs: &Substs| {
|
||||
substs.params().iter().any(keep_local)
|
||||
}) -> Substs<'tcx>,
|
||||
bare_fn: mk_bare_fn(BareFnTy<'tcx>, |fty: &BareFnTy| {
|
||||
bare_fn: mk_bare_fn(|fty: &BareFnTy| {
|
||||
keep_local(&fty.sig)
|
||||
}) -> BareFnTy<'tcx>,
|
||||
region: mk_region(Region, |r| {
|
||||
region: mk_region(|r| {
|
||||
match r {
|
||||
&ty::ReVar(_) | &ty::ReSkolemized(..) => true,
|
||||
_ => false
|
||||
@ -1169,6 +1195,12 @@ impl_interners!('tcx,
|
||||
}) -> Region
|
||||
);
|
||||
|
||||
intern_method!('tcx,
type_list: mk_type_list(Vec<Ty<'tcx>>, Deref::deref, |xs: &[Ty]| -> &Slice<Ty> {
unsafe { mem::transmute(xs) }
}, keep_local) -> Slice<Ty<'tcx>>
);

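Editor's aside, not part of the commit: the closure passed to `intern_method!` above turns an interned `&[Ty]` into `&Slice<Ty>` with `mem::transmute`, relying on the newtype having the same layout as the slice it wraps. A self-contained sketch of that cast with a hypothetical `Wrapper` type; the `#[repr(transparent)]` attribute is a later language addition that makes the layout guarantee explicit and is not in the 2016 code:

```rust
use std::mem;

// Hypothetical newtype over a slice; #[repr(transparent)] spells out the
// layout guarantee that the compiler's 2016-era transmute relies on.
#[repr(transparent)]
struct Wrapper<T>([T]);

fn wrap(xs: &[u32]) -> &Wrapper<u32> {
    // Both references are fat pointers (data pointer + length) to data with
    // the same layout, which is exactly the bet mk_type_list makes above.
    unsafe { mem::transmute(xs) }
}

fn main() {
    let raw = [1u32, 2, 3];
    let wrapped = wrap(&raw);
    assert_eq!(wrapped.0.len(), 3);
    assert_eq!(wrapped.0[1], 2);
}
```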
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
/// Create an unsafe fn ty based on a safe fn ty.
|
||||
pub fn safe_to_unsafe_fn_ty(self, bare_fn: &BareFnTy<'tcx>) -> Ty<'tcx> {
|
||||
|
@ -38,6 +38,7 @@ use std::borrow::Cow;
|
||||
use std::cell::Cell;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::iter;
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
use std::slice;
|
||||
use std::vec::IntoIter;
|
||||
@ -569,6 +570,45 @@ pub type Ty<'tcx> = &'tcx TyS<'tcx>;
|
||||
impl<'tcx> serialize::UseSpecializedEncodable for Ty<'tcx> {}
|
||||
impl<'tcx> serialize::UseSpecializedDecodable for Ty<'tcx> {}
|
||||
|
||||
/// A wrapper for slices with the additional invariant
/// that the slice is interned and no other slice with
/// the same contents can exist in the same context.
/// This means we can use pointer + length for both
/// equality comparisons and hashing.
#[derive(Debug, RustcEncodable)]
pub struct Slice<T>([T]);

impl<T> PartialEq for Slice<T> {
#[inline]
fn eq(&self, other: &Slice<T>) -> bool {
(&self.0 as *const [T]) == (&other.0 as *const [T])
}
}
impl<T> Eq for Slice<T> {}

impl<T> Hash for Slice<T> {
fn hash<H: Hasher>(&self, s: &mut H) {
(self.as_ptr(), self.len()).hash(s)
}
}

impl<T> Deref for Slice<T> {
type Target = [T];
fn deref(&self) -> &[T] {
&self.0
}
}

impl<'a, T> IntoIterator for &'a Slice<T> {
type Item = &'a T;
type IntoIter = <&'a [T] as IntoIterator>::IntoIter;
fn into_iter(self) -> Self::IntoIter {
self[..].iter()
}
}

impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice<Ty<'tcx>> {}

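Editor's aside, not part of the commit: the doc comment above is the whole point of `Slice<T>`. Because equal contents are interned into a single allocation per context, two `&Slice<T>` values are equal exactly when they are the same pointer, so pointer plus length suffice for `PartialEq` and `Hash`. A minimal stand-alone illustration of that invariant, with a hypothetical `Interner` built on `Rc<[u32]>`:

```rust
use std::collections::HashMap;
use std::rc::Rc;

// Hypothetical interner: every distinct contents gets exactly one allocation.
struct Interner {
    map: HashMap<Vec<u32>, Rc<[u32]>>,
}

impl Interner {
    fn intern(&mut self, v: Vec<u32>) -> Rc<[u32]> {
        if let Some(existing) = self.map.get(&v) {
            return existing.clone();
        }
        let interned: Rc<[u32]> = v.clone().into();
        self.map.insert(v, interned.clone());
        interned
    }
}

fn main() {
    let mut interner = Interner { map: HashMap::new() };
    let a = interner.intern(vec![1, 2, 3]);
    let b = interner.intern(vec![1, 2, 3]);
    let c = interner.intern(vec![4, 5]);

    // Equal contents come back as the *same* allocation, so pointer identity
    // (all that Slice<T>'s PartialEq and Hash look at) is enough.
    assert!(Rc::ptr_eq(&a, &b));
    assert!(!Rc::ptr_eq(&a, &c));
}
```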
/// Upvars do not get their own node-id. Instead, we use the pair of
|
||||
/// the original var id (that is, the root variable that is referenced
|
||||
/// by the upvar) and the id of the closure expression.
|
||||
|
@ -14,7 +14,6 @@ use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
|
||||
|
||||
use std::rc::Rc;
|
||||
use syntax::abi;
|
||||
use syntax::ptr::P;
|
||||
|
||||
use hir;
|
||||
|
||||
@ -437,16 +436,6 @@ impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for P<[T]> {
|
||||
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
|
||||
self.iter().map(|t| t.fold_with(folder)).collect()
|
||||
}
|
||||
|
||||
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
|
||||
self.iter().any(|t| t.visit_with(visitor))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFoldable<'tcx> for ty::TraitObject<'tcx> {
|
||||
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
|
||||
ty::TraitObject {
|
||||
@ -464,7 +453,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TraitObject<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tcx> TypeFoldable<'tcx> for &'tcx [Ty<'tcx>] {
|
||||
impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice<Ty<'tcx>> {
|
||||
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
|
||||
let tys = self.iter().map(|t| t.fold_with(folder)).collect();
|
||||
folder.tcx().mk_type_list(tys)
|
||||
|
@ -13,12 +13,12 @@
|
||||
use hir::def_id::DefId;
|
||||
use middle::region;
|
||||
use ty::subst::Substs;
|
||||
use ty::{self, AdtDef, ToPredicate, TypeFlags, Ty, TyCtxt, TyS, TypeFoldable};
|
||||
use ty::{self, AdtDef, ToPredicate, TypeFlags, Ty, TyCtxt, TypeFoldable};
|
||||
use ty::{Slice, TyS};
|
||||
use util::common::ErrorReported;
|
||||
|
||||
use collections::enum_set::{self, EnumSet, CLike};
|
||||
use std::fmt;
|
||||
use std::mem;
|
||||
use std::ops;
|
||||
use syntax::abi;
|
||||
use syntax::ast::{self, Name};
|
||||
@ -31,7 +31,7 @@ use hir;
|
||||
use self::InferTy::*;
|
||||
use self::TypeVariants::*;
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct TypeAndMut<'tcx> {
|
||||
pub ty: Ty<'tcx>,
|
||||
pub mutbl: hir::Mutability,
|
||||
@ -87,7 +87,7 @@ pub enum Issue32330 {
|
||||
|
||||
// NB: If you change this, you'll probably want to change the corresponding
|
||||
// AST structure in libsyntax/ast.rs as well.
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub enum TypeVariants<'tcx> {
|
||||
/// The primitive boolean type. Written as `bool`.
|
||||
TyBool,
|
||||
@ -155,7 +155,7 @@ pub enum TypeVariants<'tcx> {
|
||||
TyNever,
|
||||
|
||||
/// A tuple type. For example, `(i32, bool)`.
|
||||
TyTuple(&'tcx [Ty<'tcx>]),
|
||||
TyTuple(&'tcx Slice<Ty<'tcx>>),
|
||||
|
||||
/// The projection of an associated type. For example,
|
||||
/// `<T as Trait<..>>::N`.
|
||||
@ -252,7 +252,7 @@ pub enum TypeVariants<'tcx> {
|
||||
/// closure C wind up influencing the decisions we ought to make for
|
||||
/// closure C (which would then require fixed point iteration to
|
||||
/// handle). Plus it fixes an ICE. :P
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct ClosureSubsts<'tcx> {
|
||||
/// Lifetime and type parameters from the enclosing function.
|
||||
/// These are separated out because trans wants to pass them around
|
||||
@ -262,12 +262,10 @@ pub struct ClosureSubsts<'tcx> {
|
||||
/// The types of the upvars. The list parallels the freevars and
|
||||
/// `upvar_borrows` lists. These are kept distinct so that we can
|
||||
/// easily index into them.
|
||||
pub upvar_tys: &'tcx [Ty<'tcx>]
|
||||
pub upvar_tys: &'tcx Slice<Ty<'tcx>>
|
||||
}
|
||||
|
||||
impl<'tcx> serialize::UseSpecializedDecodable for ClosureSubsts<'tcx> {}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct TraitObject<'tcx> {
|
||||
pub principal: PolyExistentialTraitRef<'tcx>,
|
||||
pub region_bound: &'tcx ty::Region,
|
||||
@ -330,7 +328,7 @@ impl<'tcx> PolyTraitRef<'tcx> {
|
||||
///
|
||||
/// The substitutions don't include the erased `Self`, only trait
|
||||
/// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above).
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct ExistentialTraitRef<'tcx> {
|
||||
pub def_id: DefId,
|
||||
pub substs: &'tcx Substs<'tcx>,
|
||||
@ -423,13 +421,15 @@ pub struct ProjectionTy<'tcx> {
|
||||
pub item_name: Name,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct BareFnTy<'tcx> {
|
||||
pub unsafety: hir::Unsafety,
|
||||
pub abi: abi::Abi,
|
||||
pub sig: PolyFnSig<'tcx>,
|
||||
}
|
||||
|
||||
impl<'tcx> serialize::UseSpecializedDecodable for &'tcx BareFnTy<'tcx> {}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct ClosureTy<'tcx> {
|
||||
pub unsafety: hir::Unsafety,
|
||||
@ -467,7 +467,7 @@ impl<'tcx> PolyFnSig<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct ParamTy {
|
||||
pub idx: u32,
|
||||
pub name: Name,
|
||||
@ -654,17 +654,17 @@ pub struct EarlyBoundRegion {
|
||||
pub name: Name,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct TyVid {
|
||||
pub index: u32,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct IntVid {
|
||||
pub index: u32
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub struct FloatVid {
|
||||
pub index: u32
|
||||
}
|
||||
@ -679,7 +679,7 @@ pub struct SkolemizedRegionVid {
|
||||
pub index: u32
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
|
||||
pub enum InferTy {
|
||||
TyVar(TyVid),
|
||||
IntVar(IntVid),
|
||||
@ -694,7 +694,7 @@ pub enum InferTy {
|
||||
}
|
||||
|
||||
/// A `ProjectionPredicate` for an `ExistentialTraitRef`.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct ExistentialProjection<'tcx> {
|
||||
pub trait_ref: ExistentialTraitRef<'tcx>,
|
||||
pub item_name: Name,
|
||||
@ -739,7 +739,7 @@ impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
|
||||
pub struct BuiltinBounds(EnumSet<BuiltinBound>);
|
||||
|
||||
impl<'a, 'gcx, 'tcx> BuiltinBounds {
|
||||
@ -782,12 +782,11 @@ impl<'a> IntoIterator for &'a BuiltinBounds {
|
||||
|
||||
#[derive(Clone, RustcEncodable, PartialEq, Eq, RustcDecodable, Hash,
|
||||
Debug, Copy)]
|
||||
#[repr(usize)]
|
||||
pub enum BuiltinBound {
|
||||
Send,
|
||||
Sized,
|
||||
Copy,
|
||||
Sync,
|
||||
Send = 0,
|
||||
Sized = 1,
|
||||
Copy = 2,
|
||||
Sync = 3,
|
||||
}
|
||||
|
||||
impl CLike for BuiltinBound {
|
||||
@ -795,7 +794,13 @@ impl CLike for BuiltinBound {
|
||||
*self as usize
|
||||
}
|
||||
fn from_usize(v: usize) -> BuiltinBound {
|
||||
unsafe { mem::transmute(v) }
|
||||
match v {
|
||||
0 => BuiltinBound::Send,
|
||||
1 => BuiltinBound::Sized,
|
||||
2 => BuiltinBound::Copy,
|
||||
3 => BuiltinBound::Sync,
|
||||
_ => bug!("{} is not a valid BuiltinBound", v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -26,8 +26,7 @@ use rustc::middle::expr_use_visitor as euv;
|
||||
use rustc::middle::mem_categorization::{cmt};
|
||||
use rustc::hir::pat_util::*;
|
||||
use rustc::traits::Reveal;
|
||||
use rustc::ty::*;
|
||||
use rustc::ty;
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use std::cmp::Ordering;
|
||||
use std::fmt;
|
||||
use std::iter::{FromIterator, IntoIterator, repeat};
|
||||
@ -110,7 +109,7 @@ impl<'a, 'tcx> FromIterator<Vec<(&'a Pat, Option<Ty<'tcx>>)>> for Matrix<'a, 'tc
|
||||
//NOTE: appears to be the only place other than InferCtxt to contain a ParamEnv
|
||||
pub struct MatchCheckCtxt<'a, 'tcx: 'a> {
|
||||
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
pub param_env: ParameterEnvironment<'tcx>,
|
||||
pub param_env: ty::ParameterEnvironment<'tcx>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
@ -248,7 +247,7 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat)
|
||||
if edef.is_enum() {
|
||||
if let Def::Local(..) = cx.tcx.expect_def(p.id) {
|
||||
if edef.variants.iter().any(|variant| {
|
||||
variant.name == name.node && variant.kind == VariantKind::Unit
|
||||
variant.name == name.node && variant.kind == ty::VariantKind::Unit
|
||||
}) {
|
||||
let ty_path = cx.tcx.item_path_str(edef.did);
|
||||
let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170,
|
||||
@ -579,7 +578,7 @@ fn construct_witness<'a,'tcx>(cx: &MatchCheckCtxt<'a,'tcx>, ctor: &Constructor,
|
||||
ty::TyAdt(adt, _) => {
|
||||
let v = ctor.variant_for_adt(adt);
|
||||
match v.kind {
|
||||
VariantKind::Struct => {
|
||||
ty::VariantKind::Struct => {
|
||||
let field_pats: hir::HirVec<_> = v.fields.iter()
|
||||
.zip(pats)
|
||||
.filter(|&(_, ref pat)| pat.node != PatKind::Wild)
|
||||
@ -594,10 +593,10 @@ fn construct_witness<'a,'tcx>(cx: &MatchCheckCtxt<'a,'tcx>, ctor: &Constructor,
|
||||
let has_more_fields = field_pats.len() < pats_len;
|
||||
PatKind::Struct(def_to_path(cx.tcx, v.did), field_pats, has_more_fields)
|
||||
}
|
||||
VariantKind::Tuple => {
|
||||
ty::VariantKind::Tuple => {
|
||||
PatKind::TupleStruct(def_to_path(cx.tcx, v.did), pats.collect(), None)
|
||||
}
|
||||
VariantKind::Unit => {
|
||||
ty::VariantKind::Unit => {
|
||||
PatKind::Path(None, def_to_path(cx.tcx, v.did))
|
||||
}
|
||||
}
|
||||
@ -639,7 +638,7 @@ fn construct_witness<'a,'tcx>(cx: &MatchCheckCtxt<'a,'tcx>, ctor: &Constructor,
|
||||
impl Constructor {
|
||||
fn variant_for_adt<'tcx, 'container, 'a>(&self,
|
||||
adt: &'a ty::AdtDefData<'tcx, 'container>)
|
||||
-> &'a VariantDefData<'tcx, 'container> {
|
||||
-> &'a ty::VariantDefData<'tcx, 'container> {
|
||||
match self {
|
||||
&Variant(vid) => adt.variant_with_id(vid),
|
||||
_ => adt.struct_variant()
|
||||
@ -878,7 +877,7 @@ fn wrap_pat<'a, 'b, 'tcx>(cx: &MatchCheckCtxt<'b, 'tcx>,
|
||||
let pat_ty = cx.tcx.pat_ty(pat);
|
||||
(pat, Some(match pat.node {
|
||||
PatKind::Binding(hir::BindByRef(..), ..) => {
|
||||
pat_ty.builtin_deref(false, NoPreference).unwrap().ty
|
||||
pat_ty.builtin_deref(false, ty::NoPreference).unwrap().ty
|
||||
}
|
||||
_ => pat_ty
|
||||
}))
|
||||
@ -1068,7 +1067,7 @@ fn check_fn(cx: &mut MatchCheckCtxt,
|
||||
fn_id: NodeId) {
|
||||
match kind {
|
||||
FnKind::Closure(_) => {}
|
||||
_ => cx.param_env = ParameterEnvironment::for_item(cx.tcx, fn_id),
|
||||
_ => cx.param_env = ty::ParameterEnvironment::for_item(cx.tcx, fn_id),
|
||||
}
|
||||
|
||||
intravisit::walk_fn(cx, kind, decl, body, sp, fn_id);
|
||||
@ -1187,17 +1186,17 @@ impl<'a, 'gcx, 'tcx> Delegate<'tcx> for MutationChecker<'a, 'gcx> {
|
||||
_: NodeId,
|
||||
span: Span,
|
||||
_: cmt,
|
||||
_: &'tcx Region,
|
||||
kind: BorrowKind,
|
||||
_: &'tcx ty::Region,
|
||||
kind: ty::BorrowKind,
|
||||
_: LoanCause) {
|
||||
match kind {
|
||||
MutBorrow => {
|
||||
ty::MutBorrow => {
|
||||
struct_span_err!(self.cx.tcx.sess, span, E0301,
|
||||
"cannot mutably borrow in a pattern guard")
|
||||
.span_label(span, &format!("borrowed mutably in pattern guard"))
|
||||
.emit();
|
||||
}
|
||||
ImmBorrow | UniqueImmBorrow => {}
|
||||
ty::ImmBorrow | ty::UniqueImmBorrow => {}
|
||||
}
|
||||
}
|
||||
fn decl_without_init(&mut self, _: NodeId, _: Span) {}
|
||||
|
@ -10,9 +10,7 @@
|
||||
|
||||
#![allow(non_camel_case_types, non_upper_case_globals)]
|
||||
|
||||
pub const tag_opaque: usize = 0x00;
|
||||
|
||||
// GAP 0x01...0x19
|
||||
// GAP 0x00...0x19
|
||||
|
||||
pub const tag_items: usize = 0x100; // top-level only
|
||||
|
||||
@ -214,3 +212,8 @@ pub const tag_macro_derive_registrar: usize = 0x115;
|
||||
// NB: increment this if you change the format of metadata such that
|
||||
// rustc_version can't be found.
|
||||
pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2];
|
||||
|
||||
/// The shorthand encoding of `Ty` uses `TypeVariants`' variant `usize`
/// and is offset by this value so it never matches a real variant.
/// This offset is also chosen so that the first byte is never < 0x80.
pub const TYPE_SHORTHAND_OFFSET: usize = 0x80;

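Editor's aside, not part of the commit: assuming the opaque format's usual unsigned LEB128 layout (seven payload bits per byte, high bit set only when more bytes follow), a `TypeVariants` discriminant is far below 0x80 and therefore encodes as a single byte with the top bit clear, while a shorthand is `position + TYPE_SHORTHAND_OFFSET >= 0x80` and always starts with a byte whose top bit is set. That is what lets the decoder tell the two cases apart by peeking at one byte:

```rust
// Minimal unsigned LEB128 writer, mirroring the layout assumed above:
// seven payload bits per byte, high bit set only on continuation bytes.
fn leb128(mut value: u64) -> Vec<u8> {
    let mut out = Vec::new();
    loop {
        let mut byte = (value & 0x7f) as u8;
        value >>= 7;
        if value != 0 {
            byte |= 0x80;
        }
        out.push(byte);
        if value == 0 {
            return out;
        }
    }
}

fn main() {
    const TYPE_SHORTHAND_OFFSET: u64 = 0x80;

    // A TypeVariants discriminant is tiny (far fewer than 128 variants),
    // so its first (and only) byte has the high bit clear...
    let discriminant = leb128(17);
    assert_eq!(discriminant.len(), 1);
    assert!(discriminant[0] & 0x80 == 0);

    // ...while a shorthand is position + 0x80 >= 0x80, so its first LEB128
    // byte always has the high bit set, which is what the decoder checks.
    let shorthand = leb128(3 + TYPE_SHORTHAND_OFFSET);
    assert!(shorthand[0] & 0x80 != 0);
}
```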
@ -20,7 +20,6 @@ use common::*;
|
||||
use def_key;
|
||||
use encoder::def_to_u64;
|
||||
use index;
|
||||
use tydecode::TyDecoder;
|
||||
|
||||
use rustc::hir::def_id::CRATE_DEF_INDEX;
|
||||
use rustc::hir::svh::Svh;
|
||||
@ -45,13 +44,14 @@ use rustc_const_math::ConstInt;
|
||||
use rustc::mir::repr::Mir;
|
||||
|
||||
use std::io;
|
||||
use std::mem;
|
||||
use std::rc::Rc;
|
||||
use std::str;
|
||||
use std::u32;
|
||||
|
||||
use rbml::reader;
|
||||
use rbml;
|
||||
use rustc_serialize::{Decodable, SpecializedDecoder, opaque};
|
||||
use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque};
|
||||
use rustc_serialize as serialize;
|
||||
use syntax::attr;
|
||||
use syntax::parse::token;
|
||||
@ -233,35 +233,53 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
|
||||
|
||||
impl<'a, 'tcx> SpecializedDecoder<Ty<'tcx>> for DecodeContext<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<Ty<'tcx>, Self::Error> {
|
||||
let pos = self.opaque.position();
|
||||
let doc = rbml::Doc::at(self.opaque.data, pos);
|
||||
self.opaque.advance(doc.end - pos);
|
||||
Ok(TyDecoder::with_doc(self.tcx(), self.cdata().cnum, doc,
|
||||
&mut |d| translate_def_id(self.cdata(), d))
|
||||
.parse_ty())
|
||||
let tcx = self.tcx();
|
||||
|
||||
// Handle shorthands first, if we have a usize > 0x80.
|
||||
if self.opaque.data[self.opaque.position()] & 0x80 != 0 {
|
||||
let pos = self.read_usize()?;
|
||||
assert!(pos >= TYPE_SHORTHAND_OFFSET);
|
||||
let key = ty::CReaderCacheKey {
|
||||
cnum: self.cdata().cnum,
|
||||
pos: pos - TYPE_SHORTHAND_OFFSET
|
||||
};
|
||||
if let Some(ty) = tcx.rcache.borrow().get(&key).cloned() {
|
||||
return Ok(ty);
|
||||
}
|
||||
|
||||
let new = opaque::Decoder::new(self.opaque.data, key.pos);
|
||||
let old = mem::replace(&mut self.opaque, new);
|
||||
let ty = Ty::decode(self)?;
|
||||
self.opaque = old;
|
||||
tcx.rcache.borrow_mut().insert(key, ty);
|
||||
return Ok(ty);
|
||||
}
|
||||
|
||||
Ok(tcx.mk_ty(ty::TypeVariants::decode(self)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> SpecializedDecoder<&'tcx Substs<'tcx>> for DecodeContext<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> {
|
||||
let substs = Substs::decode(self)?;
|
||||
Ok(self.tcx().mk_substs(substs))
|
||||
Ok(self.tcx().mk_substs(Decodable::decode(self)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Region> for DecodeContext<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<&'tcx ty::Region, Self::Error> {
|
||||
let r = ty::Region::decode(self)?;
|
||||
Ok(self.tcx().mk_region(r))
|
||||
Ok(self.tcx().mk_region(Decodable::decode(self)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> SpecializedDecoder<ty::ClosureSubsts<'tcx>> for DecodeContext<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<ty::ClosureSubsts<'tcx>, Self::Error> {
|
||||
Ok(ty::ClosureSubsts {
|
||||
func_substs: Decodable::decode(self)?,
|
||||
upvar_tys: self.tcx().mk_type_list(Decodable::decode(self)?)
|
||||
})
|
||||
impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice<Ty<'tcx>>> for DecodeContext<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice<Ty<'tcx>>, Self::Error> {
|
||||
Ok(self.tcx().mk_type_list(Decodable::decode(self)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::BareFnTy<'tcx>> for DecodeContext<'a, 'tcx> {
|
||||
fn specialized_decode(&mut self) -> Result<&'tcx ty::BareFnTy<'tcx>, Self::Error> {
|
||||
Ok(self.tcx().mk_bare_fn(Decodable::decode(self)?))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -18,7 +18,6 @@ use common::*;
|
||||
use cstore;
|
||||
use decoder;
|
||||
use def_key;
|
||||
use tyencode;
|
||||
use index::{self, IndexData};
|
||||
|
||||
use middle::cstore::{InlinedItemRef, LinkMeta};
|
||||
@ -34,8 +33,10 @@ use rustc::mir::mir_map::MirMap;
|
||||
use rustc::session::config::{self, PanicStrategy, CrateTypeRustcMacro};
|
||||
use rustc::util::nodemap::{FnvHashMap, NodeSet};
|
||||
|
||||
use rustc_serialize::{Encodable, SpecializedEncoder, opaque};
|
||||
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
|
||||
use rustc_serialize as serialize;
|
||||
use std::cell::RefCell;
|
||||
use std::intrinsics;
|
||||
use std::io::prelude::*;
|
||||
use std::io::Cursor;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
@ -55,14 +56,14 @@ use rustc::hir::map::DefKey;
|
||||
use super::index_builder::{FromId, IndexBuilder, ItemContentBuilder, Untracked, XRef};
|
||||
|
||||
pub struct EncodeContext<'a, 'tcx: 'a> {
|
||||
pub rbml_w: rbml::writer::Encoder<'a>,
|
||||
rbml_w: rbml::writer::Encoder<'a>,
|
||||
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
pub reexports: &'a def::ExportMap,
|
||||
pub link_meta: &'a LinkMeta,
|
||||
pub cstore: &'a cstore::CStore,
|
||||
pub type_abbrevs: &'a tyencode::abbrev_map<'tcx>,
|
||||
pub reachable: &'a NodeSet,
|
||||
pub mir_map: &'a MirMap<'tcx>,
|
||||
reexports: &'a def::ExportMap,
|
||||
link_meta: &'a LinkMeta,
|
||||
cstore: &'a cstore::CStore,
|
||||
type_shorthands: RefCell<FnvHashMap<Ty<'tcx>, usize>>,
|
||||
reachable: &'a NodeSet,
|
||||
mir_map: &'a MirMap<'tcx>,
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> Deref for EncodeContext<'a, 'tcx> {
|
||||
@ -116,12 +117,32 @@ impl<'a, 'tcx> serialize::Encoder for ::encoder::EncodeContext<'a, 'tcx> {
|
||||
|
||||
impl<'a, 'tcx> SpecializedEncoder<Ty<'tcx>> for EncodeContext<'a, 'tcx> {
|
||||
fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> {
|
||||
let cx = self.ty_str_ctxt();
|
||||
let existing_shorthand = self.type_shorthands.borrow().get(ty).cloned();
|
||||
if let Some(shorthand) = existing_shorthand {
|
||||
return self.emit_usize(shorthand);
|
||||
}
|
||||
|
||||
self.start_tag(tag_opaque)?;
|
||||
tyencode::enc_ty(&mut self.rbml_w.opaque.cursor, &cx, ty);
|
||||
self.mark_stable_position();
|
||||
self.end_tag()
|
||||
let start = self.mark_stable_position();
|
||||
ty.sty.encode(self)?;
|
||||
let len = self.mark_stable_position() - start;
|
||||
|
||||
// The shorthand encoding uses the same usize as the
|
||||
// discriminant, with an offset so they can't conflict.
|
||||
let discriminant = unsafe { intrinsics::discriminant_value(&ty.sty) };
|
||||
assert!(discriminant < TYPE_SHORTHAND_OFFSET as u64);
|
||||
let shorthand = start + TYPE_SHORTHAND_OFFSET;
|
||||
|
||||
// Get the number of bits that leb128 could fit
|
||||
// in the same space as the fully encoded type.
|
||||
let leb128_bits = len * 7;
|
||||
|
||||
// Check that the shorthand is not longer than the
// full encoding itself, i.e. it's an obvious win.
|
||||
if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) {
|
||||
self.type_shorthands.borrow_mut().insert(*ty, shorthand);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
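Editor's aside, not part of the commit: the check above only records a shorthand when replaying it later (as a LEB128 `usize`) cannot take more bytes than the `len`-byte encoding it would replace. A stand-alone restatement of that test with a couple of worked values; the function name is hypothetical:

```rust
// Hypothetical stand-alone version of the "obvious win" test above: only
// remember a shorthand if its LEB128 encoding cannot exceed the `len` bytes
// the full type encoding just took.
fn shorthand_is_win(shorthand: usize, len: usize) -> bool {
    let leb128_bits = len * 7;
    leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits)
}

fn main() {
    // A type whose full encoding took 2 bytes leaves room for 14 bits of
    // LEB128 payload, i.e. any shorthand below 16384 is a win.
    assert!(shorthand_is_win(0x80 + 300, 2));
    assert!(!shorthand_is_win(20_000, 2));
    // Encodings of 10+ bytes always win, since any 64-bit shorthand fits.
    assert!(shorthand_is_win(usize::max_value(), 10));
}
```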
@ -1742,7 +1763,7 @@ pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
cstore: cstore,
|
||||
reachable: reachable,
|
||||
mir_map: mir_map,
|
||||
type_abbrevs: &Default::default(),
|
||||
type_shorthands: Default::default(),
|
||||
});
|
||||
|
||||
// RBML compacts the encoded bytes whenever appropriate,
|
||||
|
@ -17,9 +17,9 @@
|
||||
html_root_url = "https://doc.rust-lang.org/nightly/")]
|
||||
#![cfg_attr(not(stage0), deny(warnings))]
|
||||
|
||||
#![feature(core_intrinsics)]
|
||||
#![feature(box_patterns)]
|
||||
#![feature(dotdot_in_tuple_patterns)]
|
||||
#![feature(enumset)]
|
||||
#![feature(question_mark)]
|
||||
#![feature(quote)]
|
||||
#![feature(rustc_diagnostic_macros)]
|
||||
@ -63,8 +63,6 @@ pub mod diagnostics;
|
||||
pub mod astencode;
|
||||
pub mod common;
|
||||
pub mod def_key;
|
||||
pub mod tyencode;
|
||||
pub mod tydecode;
|
||||
pub mod encoder;
|
||||
mod index_builder;
|
||||
pub mod decoder;
|
||||
|
@ -1,598 +0,0 @@
|
||||
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
|
||||
// Type decoding
|
||||
|
||||
// tjc note: Would be great to have a `match check` macro equivalent
|
||||
// for some of these
|
||||
|
||||
#![allow(non_camel_case_types)]
|
||||
|
||||
use rustc::hir;
|
||||
|
||||
use rustc::hir::def_id::{CrateNum, DefId, DefIndex};
|
||||
use middle::region;
|
||||
use rustc::ty::subst::{Kind, Substs};
|
||||
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
|
||||
|
||||
use rbml;
|
||||
use rustc_serialize::leb128;
|
||||
use std::str;
|
||||
use syntax::abi;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
|
||||
// Compact string representation for Ty values. API TyStr &
|
||||
// parse_from_str. Extra parameters are for converting to/from def_ids in the
|
||||
// data buffer. Whatever format you choose should not contain pipe characters.
|
||||
|
||||
pub type DefIdConvert<'a> = &'a mut FnMut(DefId) -> DefId;
|
||||
|
||||
pub struct TyDecoder<'a, 'tcx: 'a> {
|
||||
data: &'a [u8],
|
||||
krate: CrateNum,
|
||||
pos: usize,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
conv_def_id: DefIdConvert<'a>,
|
||||
}
|
||||
|
||||
impl<'a,'tcx> TyDecoder<'a,'tcx> {
|
||||
pub fn with_doc(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
crate_num: CrateNum,
|
||||
doc: rbml::Doc<'a>,
|
||||
conv: DefIdConvert<'a>)
|
||||
-> TyDecoder<'a,'tcx> {
|
||||
TyDecoder::new(doc.data, crate_num, doc.start, tcx, conv)
|
||||
}
|
||||
|
||||
pub fn new(data: &'a [u8],
|
||||
crate_num: CrateNum,
|
||||
pos: usize,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
conv: DefIdConvert<'a>)
|
||||
-> TyDecoder<'a, 'tcx> {
|
||||
TyDecoder {
|
||||
data: data,
|
||||
krate: crate_num,
|
||||
pos: pos,
|
||||
tcx: tcx,
|
||||
conv_def_id: conv,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn position(&self) -> usize {
|
||||
self.pos
|
||||
}
|
||||
|
||||
fn peek(&self) -> char {
|
||||
self.data[self.pos] as char
|
||||
}
|
||||
|
||||
fn next(&mut self) -> char {
|
||||
let ch = self.data[self.pos] as char;
|
||||
self.pos = self.pos + 1;
|
||||
return ch;
|
||||
}
|
||||
|
||||
fn next_byte(&mut self) -> u8 {
|
||||
let b = self.data[self.pos];
|
||||
self.pos = self.pos + 1;
|
||||
return b;
|
||||
}
|
||||
|
||||
fn scan<F>(&mut self, mut is_last: F) -> &'a [u8]
|
||||
where F: FnMut(char) -> bool,
|
||||
{
|
||||
let start_pos = self.pos;
|
||||
debug!("scan: '{}' (start)", self.data[self.pos] as char);
|
||||
while !is_last(self.data[self.pos] as char) {
|
||||
self.pos += 1;
|
||||
debug!("scan: '{}'", self.data[self.pos] as char);
|
||||
}
|
||||
let end_pos = self.pos;
|
||||
self.pos += 1;
|
||||
return &self.data[start_pos..end_pos];
|
||||
}
|
||||
|
||||
fn parse_vuint(&mut self) -> usize {
|
||||
let (value, bytes_read) = leb128::read_unsigned_leb128(self.data,
|
||||
self.pos);
|
||||
self.pos += bytes_read;
|
||||
value as usize
|
||||
}
|
||||
|
||||
fn parse_size(&mut self) -> Option<usize> {
|
||||
assert_eq!(self.next(), '/');
|
||||
|
||||
if self.peek() == '|' {
|
||||
assert_eq!(self.next(), '|');
|
||||
None
|
||||
} else {
|
||||
let n = self.parse_uint();
|
||||
assert_eq!(self.next(), '|');
|
||||
Some(n)
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_substs(&mut self) -> &'tcx Substs<'tcx> {
|
||||
let mut params = vec![];
|
||||
assert_eq!(self.next(), '[');
|
||||
while self.peek() != ']' {
|
||||
let k = match self.next() {
|
||||
'r' => Kind::from(self.parse_region()),
|
||||
't' => Kind::from(self.parse_ty()),
|
||||
_ => bug!()
|
||||
};
|
||||
params.push(k);
|
||||
}
|
||||
assert_eq!(self.next(), ']');
|
||||
|
||||
Substs::new(self.tcx, params)
|
||||
}
|
||||
|
||||
fn parse_bound_region(&mut self) -> ty::BoundRegion {
|
||||
match self.next() {
|
||||
'a' => {
|
||||
let id = self.parse_u32();
|
||||
assert_eq!(self.next(), '|');
|
||||
ty::BrAnon(id)
|
||||
}
|
||||
'[' => {
|
||||
let def = self.parse_def();
|
||||
let name = token::intern(&self.parse_str('|'));
|
||||
let issue32330 = match self.next() {
|
||||
'n' => {
|
||||
assert_eq!(self.next(), ']');
|
||||
ty::Issue32330::WontChange
|
||||
}
|
||||
'y' => {
|
||||
ty::Issue32330::WillChange {
|
||||
fn_def_id: self.parse_def(),
|
||||
region_name: token::intern(&self.parse_str(']')),
|
||||
}
|
||||
}
|
||||
c => panic!("expected n or y not {}", c)
|
||||
};
|
||||
ty::BrNamed(def, name, issue32330)
|
||||
}
|
||||
'f' => {
|
||||
let id = self.parse_u32();
|
||||
assert_eq!(self.next(), '|');
|
||||
ty::BrFresh(id)
|
||||
}
|
||||
'e' => ty::BrEnv,
|
||||
_ => bug!("parse_bound_region: bad input")
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_region(&mut self) -> &'tcx ty::Region {
|
||||
self.tcx.mk_region(match self.next() {
|
||||
'b' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let id = ty::DebruijnIndex::new(self.parse_u32());
|
||||
assert_eq!(self.next(), '|');
|
||||
let br = self.parse_bound_region();
|
||||
assert_eq!(self.next(), ']');
|
||||
ty::ReLateBound(id, br)
|
||||
}
|
||||
'B' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let index = self.parse_u32();
|
||||
assert_eq!(self.next(), '|');
|
||||
let name = token::intern(&self.parse_str(']'));
|
||||
ty::ReEarlyBound(ty::EarlyBoundRegion {
|
||||
index: index,
|
||||
name: name
|
||||
})
|
||||
}
|
||||
'f' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let scope = self.parse_scope();
|
||||
assert_eq!(self.next(), '|');
|
||||
let br = self.parse_bound_region();
|
||||
assert_eq!(self.next(), ']');
|
||||
ty::ReFree(ty::FreeRegion { scope: scope,
|
||||
bound_region: br})
|
||||
}
|
||||
's' => {
|
||||
let scope = self.parse_scope();
|
||||
assert_eq!(self.next(), '|');
|
||||
ty::ReScope(scope)
|
||||
}
|
||||
't' => ty::ReStatic,
|
||||
'e' => ty::ReEmpty,
|
||||
'E' => ty::ReErased,
|
||||
_ => bug!("parse_region: bad input")
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_scope(&mut self) -> region::CodeExtent {
|
||||
self.tcx.region_maps.bogus_code_extent(match self.next() {
|
||||
// This creates scopes with the wrong NodeId. This isn't
|
||||
// actually a problem because scopes only exist *within*
|
||||
// functions, and functions aren't loaded until trans which
|
||||
// doesn't care about regions.
|
||||
//
|
||||
// May still be worth fixing though.
|
||||
'C' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let fn_id = ast::NodeId::new(self.parse_uint());
|
||||
assert_eq!(self.next(), '|');
|
||||
let body_id = ast::NodeId::new(self.parse_uint());
|
||||
assert_eq!(self.next(), ']');
|
||||
region::CodeExtentData::CallSiteScope {
|
||||
fn_id: fn_id, body_id: body_id
|
||||
}
|
||||
}
|
||||
// This creates scopes with the wrong NodeId. (See note above.)
|
||||
'P' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let fn_id = ast::NodeId::new(self.parse_uint());
|
||||
assert_eq!(self.next(), '|');
|
||||
let body_id = ast::NodeId::new(self.parse_uint());
|
||||
assert_eq!(self.next(), ']');
|
||||
region::CodeExtentData::ParameterScope {
|
||||
fn_id: fn_id, body_id: body_id
|
||||
}
|
||||
}
|
||||
'M' => {
|
||||
let node_id = ast::NodeId::new(self.parse_uint());
|
||||
region::CodeExtentData::Misc(node_id)
|
||||
}
|
||||
'D' => {
|
||||
let node_id = ast::NodeId::new(self.parse_uint());
|
||||
region::CodeExtentData::DestructionScope(node_id)
|
||||
}
|
||||
'B' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let node_id = ast::NodeId::new(self.parse_uint());
|
||||
assert_eq!(self.next(), '|');
|
||||
let first_stmt_index = self.parse_u32();
|
||||
assert_eq!(self.next(), ']');
|
||||
let block_remainder = region::BlockRemainder {
|
||||
block: node_id, first_statement_index: first_stmt_index,
|
||||
};
|
||||
region::CodeExtentData::Remainder(block_remainder)
|
||||
}
|
||||
_ => bug!("parse_scope: bad input")
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_str(&mut self, term: char) -> String {
|
||||
let mut result = String::new();
|
||||
while self.peek() != term {
|
||||
unsafe {
|
||||
result.as_mut_vec().extend_from_slice(&[self.next_byte()])
|
||||
}
|
||||
}
|
||||
self.next();
|
||||
result
|
||||
}
|
||||
|
||||
fn parse_trait_ref(&mut self) -> ty::TraitRef<'tcx> {
|
||||
ty::TraitRef {
|
||||
def_id: self.parse_def(),
|
||||
substs: self.parse_substs()
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_existential_trait_ref(&mut self) -> ty::ExistentialTraitRef<'tcx> {
|
||||
ty::ExistentialTraitRef {
|
||||
def_id: self.parse_def(),
|
||||
substs: self.parse_substs()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_ty(&mut self) -> Ty<'tcx> {
|
||||
let tcx = self.tcx;
|
||||
match self.next() {
|
||||
'b' => return tcx.types.bool,
|
||||
'!' => return tcx.types.never,
|
||||
'i' => { /* eat the s of is */ self.next(); return tcx.types.isize },
|
||||
'u' => { /* eat the s of us */ self.next(); return tcx.types.usize },
|
||||
'M' => {
|
||||
match self.next() {
|
||||
'b' => return tcx.types.u8,
|
||||
'w' => return tcx.types.u16,
|
||||
'l' => return tcx.types.u32,
|
||||
'd' => return tcx.types.u64,
|
||||
'B' => return tcx.types.i8,
|
||||
'W' => return tcx.types.i16,
|
||||
'L' => return tcx.types.i32,
|
||||
'D' => return tcx.types.i64,
|
||||
'f' => return tcx.types.f32,
|
||||
'F' => return tcx.types.f64,
|
||||
_ => bug!("parse_ty: bad numeric type")
|
||||
}
|
||||
}
|
||||
'c' => return tcx.types.char,
|
||||
'x' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let trait_ref = ty::Binder(self.parse_existential_trait_ref());
|
||||
let builtin_bounds = self.parse_builtin_bounds();
|
||||
let region_bound = self.parse_region();
|
||||
let mut projection_bounds = Vec::new();
|
||||
|
||||
loop {
|
||||
match self.next() {
|
||||
'P' => {
|
||||
let bound = self.parse_existential_projection();
|
||||
projection_bounds.push(ty::Binder(bound));
|
||||
}
|
||||
'.' => { break; }
|
||||
c => {
|
||||
bug!("parse_bounds: bad bounds ('{}')", c)
|
||||
}
|
||||
}
|
||||
}
|
||||
assert_eq!(self.next(), ']');
|
||||
return tcx.mk_trait(ty::TraitObject {
|
||||
principal: trait_ref,
|
||||
region_bound: region_bound,
|
||||
builtin_bounds: builtin_bounds,
|
||||
projection_bounds: projection_bounds
|
||||
});
|
||||
}
|
||||
'p' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let index = self.parse_u32();
|
||||
assert_eq!(self.next(), '|');
|
||||
let name = token::intern(&self.parse_str(']'));
|
||||
return tcx.mk_param(index, name);
|
||||
}
|
||||
'~' => return tcx.mk_box(self.parse_ty()),
|
||||
'*' => return tcx.mk_ptr(self.parse_mt()),
|
||||
'&' => {
|
||||
return tcx.mk_ref(self.parse_region(), self.parse_mt());
|
||||
}
|
||||
'V' => {
|
||||
let t = self.parse_ty();
|
||||
return match self.parse_size() {
|
||||
Some(n) => tcx.mk_array(t, n),
|
||||
None => tcx.mk_slice(t)
|
||||
};
|
||||
}
|
||||
'v' => {
|
||||
return tcx.mk_str();
|
||||
}
|
||||
'T' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let mut params = Vec::new();
|
||||
while self.peek() != ']' { params.push(self.parse_ty()); }
|
||||
self.pos = self.pos + 1;
|
||||
return tcx.mk_tup(params);
|
||||
}
|
||||
'F' => {
|
||||
let def_id = self.parse_def();
|
||||
let substs = self.parse_substs();
|
||||
return tcx.mk_fn_def(def_id, substs, self.parse_bare_fn_ty());
|
||||
}
|
||||
'G' => {
|
||||
return tcx.mk_fn_ptr(self.parse_bare_fn_ty());
|
||||
}
|
||||
'#' => {
|
||||
// This is a hacky little caching scheme. The idea is that if we encode
|
||||
// the same type twice, the second (and third, and fourth...) time we will
|
||||
// just write `#123`, where `123` is the offset in the metadata of the
|
||||
// first appearance. Now when we are *decoding*, if we see a `#123`, we
|
||||
// can first check a cache (`tcx.rcache`) for that offset. If we find something,
|
||||
// we return it (modulo closure types, see below). But if not, then we
|
||||
// jump to offset 123 and read the type from there.
|
||||
|
||||
let pos = self.parse_vuint();
|
||||
let key = ty::CReaderCacheKey { cnum: self.krate, pos: pos };
|
||||
if let Some(tt) = tcx.rcache.borrow().get(&key).cloned() {
|
||||
// If there is a closure buried in the type some where, then we
|
||||
// need to re-convert any def ids (see case 'k', below). That means
|
||||
// we can't reuse the cached version.
|
||||
if !tt.has_closure_types() {
|
||||
return tt;
|
||||
}
|
||||
}
|
||||
|
||||
let mut substate = TyDecoder::new(self.data,
|
||||
self.krate,
|
||||
pos,
|
||||
self.tcx,
|
||||
self.conv_def_id);
|
||||
let tt = substate.parse_ty();
|
||||
tcx.rcache.borrow_mut().insert(key, tt);
|
||||
return tt;
|
||||
}
|
||||
'\"' => {
|
||||
let _ = self.parse_def();
|
||||
let inner = self.parse_ty();
|
||||
inner
|
||||
}
|
||||
'a' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let did = self.parse_def();
|
||||
let substs = self.parse_substs();
|
||||
assert_eq!(self.next(), ']');
|
||||
let def = self.tcx.lookup_adt_def(did);
|
||||
return self.tcx.mk_adt(def, substs);
|
||||
}
|
||||
'k' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let did = self.parse_def();
|
||||
let substs = self.parse_substs();
|
||||
let mut tys = vec![];
|
||||
while self.peek() != '.' {
|
||||
tys.push(self.parse_ty());
|
||||
}
|
||||
assert_eq!(self.next(), '.');
|
||||
assert_eq!(self.next(), ']');
|
||||
return self.tcx.mk_closure(did, substs, tys);
|
||||
}
|
||||
'P' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let trait_ref = self.parse_trait_ref();
|
||||
let name = token::intern(&self.parse_str(']'));
|
||||
return tcx.mk_projection(trait_ref, name);
|
||||
}
|
||||
'A' => {
|
||||
assert_eq!(self.next(), '[');
|
||||
let def_id = self.parse_def();
|
||||
let substs = self.parse_substs();
|
||||
assert_eq!(self.next(), ']');
|
||||
return self.tcx.mk_anon(def_id, substs);
|
||||
}
|
||||
'e' => {
|
||||
return tcx.types.err;
|
||||
}
|
||||
c => { bug!("unexpected char in type string: {}", c);}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_mutability(&mut self) -> hir::Mutability {
|
||||
match self.peek() {
|
||||
'm' => { self.next(); hir::MutMutable }
|
||||
_ => { hir::MutImmutable }
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_mt(&mut self) -> ty::TypeAndMut<'tcx> {
|
||||
let m = self.parse_mutability();
|
||||
ty::TypeAndMut { ty: self.parse_ty(), mutbl: m }
|
||||
}
|
||||
|
||||
fn parse_def(&mut self) -> DefId {
|
||||
let def_id = parse_defid(self.scan(|c| c == '|'));
|
||||
return (self.conv_def_id)(def_id);
|
||||
}
|
||||
|
||||
fn parse_uint(&mut self) -> usize {
|
||||
let mut n = 0;
|
||||
loop {
|
||||
let cur = self.peek();
|
||||
if cur < '0' || cur > '9' { return n; }
|
||||
self.pos = self.pos + 1;
|
||||
n *= 10;
|
||||
n += (cur as usize) - ('0' as usize);
|
||||
};
|
||||
}
|
||||
|
||||
fn parse_u32(&mut self) -> u32 {
|
||||
let n = self.parse_uint();
|
||||
let m = n as u32;
|
||||
assert_eq!(m as usize, n);
|
||||
m
|
||||
}
|
||||
|
||||
fn parse_abi_set(&mut self) -> abi::Abi {
|
||||
assert_eq!(self.next(), '[');
|
||||
let bytes = self.scan(|c| c == ']');
|
||||
let abi_str = str::from_utf8(bytes).unwrap();
|
||||
abi::lookup(&abi_str[..]).expect(abi_str)
|
||||
}
|
||||
|
||||
fn parse_bare_fn_ty(&mut self) -> &'tcx ty::BareFnTy<'tcx> {
|
||||
let unsafety = parse_unsafety(self.next());
|
||||
let abi = self.parse_abi_set();
|
||||
let sig = self.parse_sig();
|
||||
self.tcx.mk_bare_fn(ty::BareFnTy {
|
||||
unsafety: unsafety,
|
||||
abi: abi,
|
||||
sig: sig
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_sig(&mut self) -> ty::PolyFnSig<'tcx> {
|
||||
assert_eq!(self.next(), '[');
|
||||
let mut inputs = Vec::new();
|
||||
while self.peek() != ']' {
|
||||
inputs.push(self.parse_ty());
|
||||
}
|
||||
self.pos += 1; // eat the ']'
|
||||
let variadic = match self.next() {
|
||||
'V' => true,
|
||||
'N' => false,
|
||||
r => bug!("bad variadic: {}", r),
|
||||
};
|
||||
let output = self.parse_ty();
|
||||
ty::Binder(ty::FnSig {inputs: inputs,
|
||||
output: output,
|
||||
variadic: variadic})
|
||||
}
|
||||
|
||||
fn parse_existential_projection(&mut self) -> ty::ExistentialProjection<'tcx> {
|
||||
ty::ExistentialProjection {
|
||||
trait_ref: self.parse_existential_trait_ref(),
|
||||
item_name: token::intern(&self.parse_str('|')),
|
||||
ty: self.parse_ty(),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_builtin_bounds(&mut self) -> ty::BuiltinBounds {
|
||||
let mut builtin_bounds = ty::BuiltinBounds::empty();
|
||||
loop {
|
||||
match self.next() {
|
||||
'S' => {
|
||||
builtin_bounds.insert(ty::BoundSend);
|
||||
}
|
||||
'Z' => {
|
||||
builtin_bounds.insert(ty::BoundSized);
|
||||
}
|
||||
'P' => {
|
||||
builtin_bounds.insert(ty::BoundCopy);
|
||||
}
|
||||
'T' => {
|
||||
builtin_bounds.insert(ty::BoundSync);
|
||||
}
|
||||
'.' => {
|
||||
return builtin_bounds;
|
||||
}
|
||||
c => {
|
||||
bug!("parse_bounds: bad builtin bounds ('{}')", c)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Rust metadata parsing
|
||||
fn parse_defid(buf: &[u8]) -> DefId {
|
||||
let mut colon_idx = 0;
|
||||
let len = buf.len();
|
||||
while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1; }
|
||||
if colon_idx == len {
|
||||
error!("didn't find ':' when parsing def id");
|
||||
bug!();
|
||||
}
|
||||
|
||||
let crate_part = &buf[0..colon_idx];
|
||||
let def_part = &buf[colon_idx + 1..len];
|
||||
|
||||
let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| {
|
||||
s.parse::<usize>().ok()
|
||||
}) {
|
||||
Some(cn) => CrateNum::new(cn),
|
||||
None => bug!("internal error: parse_defid: crate number expected, found {:?}",
|
||||
crate_part)
|
||||
};
|
||||
let def_num = match str::from_utf8(def_part).ok().and_then(|s| {
|
||||
s.parse::<usize>().ok()
|
||||
}) {
|
||||
Some(dn) => dn,
|
||||
None => bug!("internal error: parse_defid: id expected, found {:?}",
|
||||
def_part)
|
||||
};
|
||||
let index = DefIndex::new(def_num);
|
||||
DefId { krate: crate_num, index: index }
|
||||
}
|
||||
|
||||
fn parse_unsafety(c: char) -> hir::Unsafety {
|
||||
match c {
|
||||
'u' => hir::Unsafety::Unsafe,
|
||||
'n' => hir::Unsafety::Normal,
|
||||
_ => bug!("parse_unsafety: bad unsafety {}", c)
|
||||
}
|
||||
}
|
@ -1,385 +0,0 @@
|
||||
// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// Type encoding
|
||||
|
||||
#![allow(unused_must_use)] // as with encoding, everything is a no-fail MemWriter
|
||||
#![allow(non_camel_case_types)]
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::io::Cursor;
|
||||
use std::io::prelude::*;
|
||||
|
||||
use rustc::hir::def_id::DefId;
|
||||
use middle::region;
|
||||
use rustc::ty::subst::Substs;
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use rustc::util::nodemap::FnvHashMap;
|
||||
|
||||
use rustc::hir;
|
||||
|
||||
use syntax::abi::Abi;
|
||||
use syntax::ast;
|
||||
|
||||
use rustc_serialize::leb128;
|
||||
use encoder;
|
||||
|
||||
pub struct ctxt<'a, 'tcx: 'a> {
|
||||
// Def -> str Callback:
|
||||
pub ds: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String,
|
||||
// The type context.
|
||||
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
pub abbrevs: &'a abbrev_map<'tcx>
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> encoder::EncodeContext<'a, 'tcx> {
|
||||
pub fn ty_str_ctxt(&self) -> ctxt<'a, 'tcx> {
|
||||
ctxt {
|
||||
ds: encoder::def_to_string,
|
||||
tcx: self.tcx,
|
||||
abbrevs: self.type_abbrevs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Compact string representation for Ty values. API TyStr & parse_from_str.
|
||||
// Extra parameters are for converting to/from def_ids in the string rep.
|
||||
// Whatever format you choose should not contain pipe characters.
|
||||
pub struct ty_abbrev {
|
||||
s: Vec<u8>
|
||||
}
|
||||
|
||||
pub type abbrev_map<'tcx> = RefCell<FnvHashMap<Ty<'tcx>, ty_abbrev>>;
|
||||
|
||||
pub fn enc_ty<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>, t: Ty<'tcx>) {
|
||||
if let Some(a) = cx.abbrevs.borrow_mut().get(&t) {
|
||||
w.write_all(&a.s);
|
||||
return;
|
||||
}
|
||||
|
||||
let pos = w.position();
|
||||
|
||||
match t.sty {
|
||||
ty::TyBool => { write!(w, "b"); }
|
||||
ty::TyChar => { write!(w, "c"); }
|
||||
ty::TyNever => { write!(w, "!"); }
|
||||
ty::TyInt(t) => {
|
||||
match t {
|
||||
ast::IntTy::Is => write!(w, "is"),
|
||||
ast::IntTy::I8 => write!(w, "MB"),
|
||||
ast::IntTy::I16 => write!(w, "MW"),
|
||||
ast::IntTy::I32 => write!(w, "ML"),
|
||||
ast::IntTy::I64 => write!(w, "MD")
|
||||
};
|
||||
}
|
||||
ty::TyUint(t) => {
|
||||
match t {
|
||||
ast::UintTy::Us => write!(w, "us"),
|
||||
ast::UintTy::U8 => write!(w, "Mb"),
|
||||
ast::UintTy::U16 => write!(w, "Mw"),
|
||||
ast::UintTy::U32 => write!(w, "Ml"),
|
||||
ast::UintTy::U64 => write!(w, "Md")
|
||||
};
|
||||
}
|
||||
ty::TyFloat(t) => {
|
||||
match t {
|
||||
ast::FloatTy::F32 => write!(w, "Mf"),
|
||||
ast::FloatTy::F64 => write!(w, "MF"),
|
||||
};
|
||||
}
|
||||
ty::TyTrait(ref obj) => {
|
||||
write!(w, "x[");
|
||||
enc_existential_trait_ref(w, cx, obj.principal.0);
|
||||
enc_builtin_bounds(w, cx, &obj.builtin_bounds);
|
||||
|
||||
enc_region(w, cx, obj.region_bound);
|
||||
|
||||
for tp in &obj.projection_bounds {
|
||||
write!(w, "P");
|
||||
enc_existential_projection(w, cx, &tp.0);
|
||||
}
|
||||
|
||||
write!(w, ".");
|
||||
write!(w, "]");
|
||||
}
|
||||
ty::TyTuple(ts) => {
|
||||
write!(w, "T[");
|
||||
for t in ts { enc_ty(w, cx, *t); }
|
||||
write!(w, "]");
|
||||
}
|
||||
ty::TyBox(typ) => { write!(w, "~"); enc_ty(w, cx, typ); }
|
||||
ty::TyRawPtr(mt) => { write!(w, "*"); enc_mt(w, cx, mt); }
|
||||
ty::TyRef(r, mt) => {
|
||||
write!(w, "&");
|
||||
enc_region(w, cx, r);
|
||||
enc_mt(w, cx, mt);
|
||||
}
|
||||
ty::TyArray(t, sz) => {
|
||||
write!(w, "V");
|
||||
enc_ty(w, cx, t);
|
||||
write!(w, "/{}|", sz);
|
||||
}
|
||||
ty::TySlice(t) => {
|
||||
write!(w, "V");
|
||||
enc_ty(w, cx, t);
|
||||
write!(w, "/|");
|
||||
}
|
||||
ty::TyStr => {
|
||||
write!(w, "v");
|
||||
}
|
||||
ty::TyFnDef(def_id, substs, f) => {
|
||||
write!(w, "F");
|
||||
write!(w, "{}|", (cx.ds)(cx.tcx, def_id));
|
||||
enc_substs(w, cx, substs);
|
||||
enc_bare_fn_ty(w, cx, f);
|
||||
}
|
||||
ty::TyFnPtr(f) => {
|
||||
write!(w, "G");
|
||||
enc_bare_fn_ty(w, cx, f);
|
||||
}
|
||||
ty::TyInfer(_) => {
|
||||
bug!("cannot encode inference variable types");
|
||||
}
|
||||
ty::TyParam(p) => {
|
||||
write!(w, "p[{}|{}]", p.idx, p.name);
|
||||
}
|
||||
ty::TyAdt(def, substs) => {
|
||||
write!(w, "a[{}|", (cx.ds)(cx.tcx, def.did));
|
||||
enc_substs(w, cx, substs);
|
||||
write!(w, "]");
|
||||
}
|
||||
ty::TyClosure(def, substs) => {
|
||||
write!(w, "k[{}|", (cx.ds)(cx.tcx, def));
|
||||
enc_substs(w, cx, substs.func_substs);
|
||||
for ty in substs.upvar_tys {
|
||||
enc_ty(w, cx, ty);
|
||||
}
|
||||
write!(w, ".");
|
||||
write!(w, "]");
|
||||
}
|
||||
ty::TyProjection(ref data) => {
|
||||
write!(w, "P[");
|
||||
enc_trait_ref(w, cx, data.trait_ref);
|
||||
write!(w, "{}]", data.item_name);
|
||||
}
|
||||
ty::TyAnon(def_id, substs) => {
|
||||
write!(w, "A[{}|", (cx.ds)(cx.tcx, def_id));
|
||||
enc_substs(w, cx, substs);
|
||||
write!(w, "]");
|
||||
}
|
||||
ty::TyError => {
|
||||
write!(w, "e");
|
||||
}
|
||||
}
|
||||
|
||||
let end = w.position();
|
||||
let len = end - pos;
|
||||
|
||||
let mut abbrev = Cursor::new(Vec::with_capacity(16));
|
||||
abbrev.write_all(b"#");
|
||||
{
|
||||
let start_position = abbrev.position() as usize;
|
||||
let meta_start = 8 + ::common::metadata_encoding_version.len() as u64;
|
||||
let bytes_written = leb128::write_unsigned_leb128(abbrev.get_mut(),
|
||||
start_position,
|
||||
pos - meta_start);
|
||||
abbrev.set_position((start_position + bytes_written) as u64);
|
||||
}
|
||||
|
||||
cx.abbrevs.borrow_mut().insert(t, ty_abbrev {
|
||||
s: if abbrev.position() < len {
|
||||
abbrev.get_ref()[..abbrev.position() as usize].to_owned()
|
||||
} else {
|
||||
// if the abbreviation is longer than the real type,
|
||||
// don't use #-notation. However, insert it here so
|
||||
// other won't have to `mark_stable_position`
|
||||
w.get_ref()[pos as usize .. end as usize].to_owned()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn enc_mutability(w: &mut Cursor<Vec<u8>>, mt: hir::Mutability) {
|
||||
match mt {
|
||||
hir::MutImmutable => (),
|
||||
hir::MutMutable => {
|
||||
write!(w, "m");
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
fn enc_mt<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
|
||||
mt: ty::TypeAndMut<'tcx>) {
|
||||
enc_mutability(w, mt.mutbl);
|
||||
enc_ty(w, cx, mt.ty);
|
||||
}
|
||||
|
||||
fn enc_substs<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
|
||||
substs: &Substs<'tcx>) {
|
||||
write!(w, "[");
|
||||
for &k in substs.params() {
|
||||
if let Some(ty) = k.as_type() {
|
||||
write!(w, "t");
|
||||
enc_ty(w, cx, ty);
|
||||
} else if let Some(r) = k.as_region() {
|
||||
write!(w, "r");
|
||||
enc_region(w, cx, r);
|
||||
} else {
|
||||
bug!()
|
||||
}
|
||||
}
|
||||
write!(w, "]");
|
||||
}
|
||||
|
||||
fn enc_region(w: &mut Cursor<Vec<u8>>, cx: &ctxt, r: &ty::Region) {
|
||||
match *r {
|
||||
ty::ReLateBound(id, br) => {
|
||||
write!(w, "b[{}|", id.depth);
|
||||
enc_bound_region(w, cx, br);
|
||||
write!(w, "]");
|
||||
}
|
||||
ty::ReEarlyBound(ref data) => {
|
||||
write!(w, "B[{}|{}]",
|
||||
data.index,
|
||||
data.name);
|
||||
}
|
||||
ty::ReFree(ref fr) => {
|
||||
write!(w, "f[");
|
||||
enc_scope(w, cx, fr.scope);
|
||||
write!(w, "|");
|
||||
enc_bound_region(w, cx, fr.bound_region);
|
||||
write!(w, "]");
|
||||
}
|
||||
ty::ReScope(scope) => {
|
||||
write!(w, "s");
|
||||
enc_scope(w, cx, scope);
|
||||
write!(w, "|");
|
||||
}
|
||||
ty::ReStatic => {
|
||||
write!(w, "t");
|
||||
}
|
||||
ty::ReEmpty => {
|
||||
write!(w, "e");
|
||||
}
|
||||
ty::ReErased => {
|
||||
write!(w, "E");
|
||||
}
|
||||
ty::ReVar(_) | ty::ReSkolemized(..) => {
|
||||
// these should not crop up after typeck
|
||||
bug!("cannot encode region variables");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn enc_scope(w: &mut Cursor<Vec<u8>>, cx: &ctxt, scope: region::CodeExtent) {
|
||||
match cx.tcx.region_maps.code_extent_data(scope) {
|
||||
region::CodeExtentData::CallSiteScope {
|
||||
fn_id, body_id } => write!(w, "C[{}|{}]", fn_id, body_id),
|
||||
region::CodeExtentData::ParameterScope {
|
||||
fn_id, body_id } => write!(w, "P[{}|{}]", fn_id, body_id),
|
||||
region::CodeExtentData::Misc(node_id) => write!(w, "M{}", node_id),
|
||||
region::CodeExtentData::Remainder(region::BlockRemainder {
|
||||
block: b, first_statement_index: i }) => write!(w, "B[{}|{}]", b, i),
|
||||
region::CodeExtentData::DestructionScope(node_id) => write!(w, "D{}", node_id),
|
||||
};
|
||||
}
|
||||
|
||||
fn enc_bound_region(w: &mut Cursor<Vec<u8>>, cx: &ctxt, br: ty::BoundRegion) {
|
||||
match br {
|
||||
ty::BrAnon(idx) => {
|
||||
write!(w, "a{}|", idx);
|
||||
}
|
||||
ty::BrNamed(d, name, issue32330) => {
|
||||
write!(w, "[{}|{}|",
|
||||
(cx.ds)(cx.tcx, d),
|
||||
name);
|
||||
|
||||
match issue32330 {
|
||||
ty::Issue32330::WontChange =>
|
||||
write!(w, "n]"),
|
||||
ty::Issue32330::WillChange { fn_def_id, region_name } =>
|
||||
write!(w, "y{}|{}]", (cx.ds)(cx.tcx, fn_def_id), region_name),
|
||||
};
|
||||
}
|
||||
ty::BrFresh(id) => {
|
||||
write!(w, "f{}|", id);
|
||||
}
|
||||
ty::BrEnv => {
|
||||
write!(w, "e|");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn enc_trait_ref<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
|
||||
s: ty::TraitRef<'tcx>) {
|
||||
write!(w, "{}|", (cx.ds)(cx.tcx, s.def_id));
|
||||
enc_substs(w, cx, s.substs);
|
||||
}
|
||||
|
||||
fn enc_existential_trait_ref<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
|
||||
s: ty::ExistentialTraitRef<'tcx>) {
|
||||
write!(w, "{}|", (cx.ds)(cx.tcx, s.def_id));
|
||||
enc_substs(w, cx, s.substs);
|
||||
}
|
||||
|
||||
fn enc_unsafety(w: &mut Cursor<Vec<u8>>, p: hir::Unsafety) {
|
||||
match p {
|
||||
hir::Unsafety::Normal => write!(w, "n"),
|
||||
hir::Unsafety::Unsafe => write!(w, "u"),
|
||||
};
|
||||
}
|
||||
|
||||
fn enc_abi(w: &mut Cursor<Vec<u8>>, abi: Abi) {
|
||||
write!(w, "[");
|
||||
write!(w, "{}", abi.name());
|
||||
write!(w, "]");
|
||||
}
|
||||
|
||||
fn enc_bare_fn_ty<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
|
||||
ft: &ty::BareFnTy<'tcx>) {
|
||||
enc_unsafety(w, ft.unsafety);
|
||||
enc_abi(w, ft.abi);
|
||||
enc_fn_sig(w, cx, &ft.sig);
|
||||
}
|
||||
|
||||
fn enc_fn_sig<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>,
|
||||
fsig: &ty::PolyFnSig<'tcx>) {
|
||||
write!(w, "[");
|
||||
for ty in &fsig.0.inputs {
|
||||
enc_ty(w, cx, *ty);
|
||||
}
|
||||
write!(w, "]");
|
||||
if fsig.0.variadic {
|
||||
write!(w, "V");
|
||||
} else {
|
||||
write!(w, "N");
|
||||
}
|
||||
enc_ty(w, cx, fsig.0.output);
|
||||
}
|
||||
|
||||
fn enc_builtin_bounds(w: &mut Cursor<Vec<u8>>, _cx: &ctxt, bs: &ty::BuiltinBounds) {
|
||||
for bound in bs {
|
||||
match bound {
|
||||
ty::BoundSend => write!(w, "S"),
|
||||
ty::BoundSized => write!(w, "Z"),
|
||||
ty::BoundCopy => write!(w, "P"),
|
||||
ty::BoundSync => write!(w, "T"),
|
||||
};
|
||||
}
|
||||
|
||||
write!(w, ".");
|
||||
}
|
||||
|
||||
fn enc_existential_projection<'a, 'tcx>(w: &mut Cursor<Vec<u8>>,
|
||||
cx: &ctxt<'a, 'tcx>,
|
||||
data: &ty::ExistentialProjection<'tcx>) {
|
||||
enc_existential_trait_ref(w, cx, data.trait_ref);
|
||||
write!(w, "{}|", data.item_name);
|
||||
enc_ty(w, cx, data.ty);
|
||||
}
|
@ -134,7 +134,7 @@ impl<
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let mut bits = 0;
for item in self {
bits |= item.to_usize();
bits |= 1 << item.to_usize();
}
s.emit_usize(bits)
}
@ -148,7 +148,7 @@ impl<
let mut set = EnumSet::new();
for bit in 0..(mem::size_of::<usize>()*8) {
if bits & (1 << bit) != 0 {
set.insert(CLike::from_usize(1 << bit));
set.insert(CLike::from_usize(bit));
}
}
Ok(set)
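Editor's aside, not part of the commit: the two hunks above fix the `EnumSet` round-trip so that encoding sets bit `1 << to_usize()` for each member, and decoding maps bit positions, not `1 << bit`, back to members. A self-contained sketch of the corrected round-trip with a stand-in enum:

```rust
use std::mem;

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Bound { Send = 0, Sized = 1, Copy = 2, Sync = 3 }

// Corrected encoding: each member sets its *bit*, not its index value.
fn encode(items: &[Bound]) -> usize {
    let mut bits = 0;
    for item in items {
        bits |= 1 << (*item as usize);
    }
    bits
}

// Corrected decoding: bit positions map straight back to members.
fn decode(bits: usize) -> Vec<Bound> {
    let mut set = Vec::new();
    for bit in 0..(mem::size_of::<usize>() * 8) {
        if bits & (1 << bit) != 0 {
            set.push(match bit {
                0 => Bound::Send,
                1 => Bound::Sized,
                2 => Bound::Copy,
                3 => Bound::Sync,
                _ => panic!("{} is not a valid Bound", bit),
            });
        }
    }
    set
}

fn main() {
    let bits = encode(&[Bound::Send, Bound::Copy]);
    assert_eq!(bits, 0b101);
    assert_eq!(decode(bits), vec![Bound::Send, Bound::Copy]);
}
```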