cache ADT dtorck results

This avoids visiting the fields of all structs multiple times, improving
item-bodies checking time by 10% (!).
Ariel Ben-Yehuda 2017-04-23 17:43:23 +03:00 committed by Ariel Ben-Yehuda
parent 5412587910
commit d3476f4b76
8 changed files with 300 additions and 399 deletions
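The speedup comes from turning the per-ADT drop-check constraint into a cached query: each ADT's fields are walked once and the stored result is reused by later type traversals. Below is a minimal sketch of that idea using toy names and a plain HashMap; it is illustrative only, not the compiler's dep-graph query machinery (in the commit itself the cache sits behind the new `adt_dtorck_constraint` query and the `DtorckConstraint(D)` dep node added in the hunks below).

use std::collections::HashMap;

type AdtId = u32;

// Simplified stand-in for ty::DtorckConstraint.
#[derive(Clone, Default, Debug)]
struct DtorckConstraint {
    outlives: Vec<String>,     // values that must outlive the value being dropped
    dtorck_types: Vec<String>, // types the caller still has to resolve
}

struct Tcx {
    fields: HashMap<AdtId, Vec<String>>,            // hypothetical field types per ADT
    dtorck_cache: HashMap<AdtId, DtorckConstraint>, // memoized per-ADT results
}

impl Tcx {
    fn adt_dtorck_constraint(&mut self, id: AdtId) -> DtorckConstraint {
        if let Some(cached) = self.dtorck_cache.get(&id) {
            // Cache hit: the ADT's fields are not walked again.
            return cached.clone();
        }
        // Cache miss: walk the fields once and record the combined result.
        let mut result = DtorckConstraint::default();
        for field_ty in self.fields.get(&id).cloned().unwrap_or_default() {
            result.dtorck_types.push(field_ty);
        }
        self.dtorck_cache.insert(id, result.clone());
        result
    }
}

fn main() {
    let mut tcx = Tcx { fields: HashMap::new(), dtorck_cache: HashMap::new() };
    tcx.fields.insert(0, vec!["T".to_string(), "Box<U>".to_string()]);
    let first = tcx.adt_dtorck_constraint(0);  // walks the fields
    let second = tcx.adt_dtorck_constraint(0); // served from the cache
    assert_eq!(first.dtorck_types, second.dtorck_types);
}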

View File

@ -91,6 +91,7 @@ pub enum DepNode<D: Clone + Debug> {
IsForeignItem(D),
TypeParamPredicates((D, D)),
SizedConstraint(D),
DtorckConstraint(D),
AdtDestructor(D),
AssociatedItemDefIds(D),
InherentImpls(D),
@ -228,6 +229,7 @@ impl<D: Clone + Debug> DepNode<D> {
Some(TypeParamPredicates((try_opt!(op(item)), try_opt!(op(param)))))
}
SizedConstraint(ref d) => op(d).map(SizedConstraint),
DtorckConstraint(ref d) => op(d).map(DtorckConstraint),
AdtDestructor(ref d) => op(d).map(AdtDestructor),
AssociatedItemDefIds(ref d) => op(d).map(AssociatedItemDefIds),
InherentImpls(ref d) => op(d).map(InherentImpls),

View File

@ -1829,6 +1829,7 @@ register_diagnostics! {
E0314, // closure outlives stack frame
E0315, // cannot invoke closure outside of its lifetime
E0316, // nested quantification of lifetimes
E0320, // recursive overflow during dropck
E0473, // dereference of reference outside its lifetime
E0474, // captured variable `..` does not outlive the enclosing closure
E0475, // index of slice outside its lifetime

View File

@ -107,6 +107,13 @@ impl<'tcx> Value<'tcx> for Ty<'tcx> {
}
}
impl<'tcx> Value<'tcx> for ty::DtorckConstraint<'tcx> {
fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
Self::empty()
}
}
pub struct CycleError<'a, 'tcx: 'a> {
span: Span,
cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
@ -397,6 +404,7 @@ define_maps! { <'tcx>
pub adt_def: ItemSignature(DefId) -> &'tcx ty::AdtDef,
pub adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
pub adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>],
pub adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>,
/// True if this is a foreign item (i.e., linked via `extern { ... }`).
pub is_foreign_item: IsForeignItem(DefId) -> bool,

View File

@ -31,13 +31,15 @@ use ty;
use ty::subst::{Subst, Substs};
use ty::util::IntTypeExt;
use ty::walk::TypeWalker;
use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
use util::common::ErrorReported;
use util::nodemap::{NodeSet, DefIdMap, FxHashMap, FxHashSet};
use serialize::{self, Encodable, Encoder};
use std::cell::{Cell, RefCell, Ref};
use std::collections::BTreeMap;
use std::cmp;
use std::hash::{Hash, Hasher};
use std::iter::FromIterator;
use std::ops::Deref;
use std::rc::Rc;
use std::slice;
@ -1332,17 +1334,6 @@ impl<'a, 'tcx> ParameterEnvironment<'tcx> {
pub struct Destructor {
/// The def-id of the destructor method
pub did: DefId,
/// Invoking the destructor of a dtorck type during usual cleanup
/// (e.g. the glue emitted for stack unwinding) requires all
/// lifetimes in the type-structure of `adt` to strictly outlive
/// the adt value itself.
///
/// If `adt` is not dtorck, then the adt's destructor can be
/// invoked even when there are lifetimes in the type-structure of
/// `adt` that do not strictly outlive the adt value itself.
/// (This allows programs to make cyclic structures without
/// resorting to unsafe means; see RFCs 769 and 1238).
pub is_dtorck: bool,
}
bitflags! {
@ -1609,14 +1600,6 @@ impl<'a, 'gcx, 'tcx> AdtDef {
}
}
/// Returns whether this is a dtorck type. If this returns
/// true, this type being safe for destruction requires it to be
/// alive; Otherwise, only the contents are required to be.
#[inline]
pub fn is_dtorck(&'gcx self, tcx: TyCtxt) -> bool {
self.destructor(tcx).map_or(false, |d| d.is_dtorck)
}
/// Returns whether this type is #[fundamental] for the purposes
/// of coherence checking.
#[inline]
@ -2708,6 +2691,38 @@ fn adt_sized_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
result
}
/// Calculates the dtorck constraint for a type.
fn adt_dtorck_constraint<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> DtorckConstraint<'tcx> {
let def = tcx.lookup_adt_def(def_id);
let span = tcx.def_span(def_id);
debug!("dtorck_constraint: {:?}", def);
if def.is_phantom_data() {
let result = DtorckConstraint {
outlives: vec![],
dtorck_types: vec![
tcx.mk_param_from_def(&tcx.item_generics(def_id).types[0])
]
};
debug!("dtorck_constraint: {:?} => {:?}", def, result);
return result;
}
let mut result = def.all_fields()
.map(|field| tcx.item_type(field.did))
.map(|fty| tcx.dtorck_constraint_for_ty(span, fty, 0, fty))
.collect::<Result<DtorckConstraint, ErrorReported>>()
.unwrap_or(DtorckConstraint::empty());
result.outlives.extend(tcx.destructor_constraints(def));
result.dedup();
debug!("dtorck_constraint: {:?} => {:?}", def, result);
result
}
fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> Rc<Vec<DefId>> {
@ -2736,6 +2751,7 @@ pub fn provide(providers: &mut ty::maps::Providers) {
associated_item,
associated_item_def_ids,
adt_sized_constraint,
adt_dtorck_constraint,
..*providers
};
}
@ -2743,6 +2759,7 @@ pub fn provide(providers: &mut ty::maps::Providers) {
pub fn provide_extern(providers: &mut ty::maps::Providers) {
*providers = ty::maps::Providers {
adt_sized_constraint,
adt_dtorck_constraint,
..*providers
};
}
@ -2758,3 +2775,46 @@ pub fn provide_extern(providers: &mut ty::maps::Providers) {
pub struct CrateInherentImpls {
pub inherent_impls: DefIdMap<Rc<Vec<DefId>>>,
}
/// A set of constraints that need to be satisfied in order for
/// a type to be valid for destruction.
#[derive(Clone, Debug)]
pub struct DtorckConstraint<'tcx> {
/// Types that are required to be alive in order for this
/// type to be valid for destruction.
pub outlives: Vec<ty::subst::Kind<'tcx>>,
/// Types that could not be resolved: projections and params.
pub dtorck_types: Vec<Ty<'tcx>>,
}
impl<'tcx> FromIterator<DtorckConstraint<'tcx>> for DtorckConstraint<'tcx>
{
fn from_iter<I: IntoIterator<Item=DtorckConstraint<'tcx>>>(iter: I) -> Self {
let mut result = Self::empty();
for constraint in iter {
result.outlives.extend(constraint.outlives);
result.dtorck_types.extend(constraint.dtorck_types);
}
result
}
}
impl<'tcx> DtorckConstraint<'tcx> {
fn empty() -> DtorckConstraint<'tcx> {
DtorckConstraint {
outlives: vec![],
dtorck_types: vec![]
}
}
fn dedup<'a>(&mut self) {
let mut outlives = FxHashSet();
let mut dtorck_types = FxHashSet();
self.outlives.retain(|&val| outlives.replace(val).is_none());
self.dtorck_types.retain(|&val| dtorck_types.replace(val).is_none());
}
}
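The `dedup` above keeps only the first occurrence of each `outlives`/`dtorck_types` entry while preserving order; `replace(val).is_none()` on a hash set is, for these value types, equivalent to the more common `insert` idiom. A tiny standalone illustration with a plain `HashSet` and made-up values:

use std::collections::HashSet;

fn main() {
    let mut outlives = vec!["'a", "T", "'a", "U", "T"];
    let mut seen = HashSet::new();
    // retain() keeps the first occurrence of each value, preserving order.
    outlives.retain(|&val| seen.insert(val));
    assert_eq!(outlives, ["'a", "T", "U"]);
}

The `FromIterator` impl plays a similar supporting role: it lets `adt_dtorck_constraint` combine the per-field results with a single `collect::<Result<DtorckConstraint, ErrorReported>>()`.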

View File

@ -19,6 +19,7 @@ use ty::{self, Ty, TyCtxt, TypeAndMut, TypeFlags, TypeFoldable};
use ty::ParameterEnvironment;
use ty::fold::TypeVisitor;
use ty::layout::{Layout, LayoutError};
use ty::subst::{Subst, Kind};
use ty::TypeVariants::*;
use util::common::ErrorReported;
use util::nodemap::{FxHashMap, FxHashSet};
@ -385,6 +386,27 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
None => return None,
};
Some(ty::Destructor { did: dtor_did })
}
/// Return the set of types that are required to be alive in
/// order to run the destructor of `def` (see RFCs 769 and
/// 1238).
///
/// Note that this returns only the constraints for the
/// destructor of `def` itself. For the destructors of the
/// contents, you need `adt_dtorck_constraint`.
pub fn destructor_constraints(self, def: &'tcx ty::AdtDef)
-> Vec<ty::subst::Kind<'tcx>>
{
let dtor = match def.destructor(self) {
None => {
debug!("destructor_constraints({:?}) - no dtor", def.did);
return vec![]
}
Some(dtor) => dtor.did
};
// RFC 1238: if the destructor method is tagged with the
// attribute `unsafe_destructor_blind_to_params`, then the
// compiler is being instructed to *assume* that the
@ -394,11 +416,147 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
// Such access can be in plain sight (e.g. dereferencing
// `*foo.0` of `Foo<'a>(&'a u32)`) or indirectly hidden
// (e.g. calling `foo.0.clone()` of `Foo<T:Clone>`).
let is_dtorck = !self.has_attr(dtor_did, "unsafe_destructor_blind_to_params");
Some(ty::Destructor { did: dtor_did, is_dtorck: is_dtorck })
if self.has_attr(dtor, "unsafe_destructor_blind_to_params") {
debug!("destructor_constraint({:?}) - blind", def.did);
return vec![];
}
let impl_def_id = self.associated_item(dtor).container.id();
let impl_generics = self.item_generics(impl_def_id);
// We have a destructor - all the parameters that are not
// pure_wrt_drop (i.e., don't have a #[may_dangle] attribute)
// must be live.
// We need to return the list of parameters from the ADTs
// generics/substs that correspond to impure parameters on the
// impl's generics. This is a bit ugly, but conceptually simple:
//
// Suppose our ADT looks like the following
//
// struct S<X, Y, Z>(X, Y, Z);
//
// and the impl is
//
// impl<#[may_dangle] P0, P1, P2> Drop for S<P1, P2, P0>
//
// We want to return the parameters (X, Y). For that, we match
// up the item-substs <X, Y, Z> with the substs on the impl ADT,
// <P1, P2, P0>, and then look up which of the impl substs refer to
// parameters marked as pure.
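//
// As a complete illustration (hypothetical example code, assuming the
// `dropck_eyepatch` feature, which requires the impl to be `unsafe`):
//
//     #![feature(dropck_eyepatch)]
//     struct S<X, Y, Z>(X, Y, Z);
//     unsafe impl<#[may_dangle] P0, P1, P2> Drop for S<P1, P2, P0> {
//         fn drop(&mut self) {}
//     }
//
// Zipping the item substs <X, Y, Z> against the impl substs <P1, P2, P0>
// pairs Z with the pure (#[may_dangle]) parameter P0, so only X and Y are
// returned as parameters that must be live when the value is dropped.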
let impl_substs = match self.item_type(impl_def_id).sty {
ty::TyAdt(def_, substs) if def_ == def => substs,
_ => bug!()
};
let item_substs = match self.item_type(def.did).sty {
ty::TyAdt(def_, substs) if def_ == def => substs,
_ => bug!()
};
let result = item_substs.iter().zip(impl_substs.iter())
.filter(|&(_, &k)| {
if let Some(&ty::Region::ReEarlyBound(ref ebr)) = k.as_region() {
!impl_generics.region_param(ebr).pure_wrt_drop
} else if let Some(&ty::TyS {
sty: ty::TypeVariants::TyParam(ref pt), ..
}) = k.as_type() {
!impl_generics.type_param(pt).pure_wrt_drop
} else {
// not a type or region param - this should be reported
// as an error.
false
}
}).map(|(&item_param, _)| item_param).collect();
debug!("destructor_constraint({:?}) = {:?}", def.did, result);
result
}
pub fn closure_base_def_id(&self, def_id: DefId) -> DefId {
/// Return a set of constraints that needs to be satisfied in
/// order for `ty` to be valid for destruction.
pub fn dtorck_constraint_for_ty(self,
span: Span,
for_ty: Ty<'tcx>,
depth: usize,
ty: Ty<'tcx>)
-> Result<ty::DtorckConstraint<'tcx>, ErrorReported>
{
debug!("dtorck_constraint_for_ty({:?}, {:?}, {:?}, {:?})",
span, for_ty, depth, ty);
if depth >= self.sess.recursion_limit.get() {
let mut err = struct_span_err!(
self.sess, span, E0320,
"overflow while adding drop-check rules for {}", for_ty);
err.note(&format!("overflowed on {}", ty));
err.emit();
return Err(ErrorReported);
}
let result = match ty.sty {
ty::TyBool | ty::TyChar | ty::TyInt(_) | ty::TyUint(_) |
ty::TyFloat(_) | ty::TyStr | ty::TyNever |
ty::TyRawPtr(..) | ty::TyRef(..) | ty::TyFnDef(..) | ty::TyFnPtr(_) => {
// these types never have a destructor
Ok(ty::DtorckConstraint::empty())
}
ty::TyArray(ety, _) | ty::TySlice(ety) => {
// single-element containers, behave like their element
self.dtorck_constraint_for_ty(span, for_ty, depth+1, ety)
}
ty::TyTuple(tys, _) => {
tys.iter().map(|ty| {
self.dtorck_constraint_for_ty(span, for_ty, depth+1, ty)
}).collect()
}
ty::TyClosure(def_id, substs) => {
substs.upvar_tys(def_id, self).map(|ty| {
self.dtorck_constraint_for_ty(span, for_ty, depth+1, ty)
}).collect()
}
ty::TyAdt(def, substs) => {
let ty::DtorckConstraint {
dtorck_types, outlives
} = ty::queries::adt_dtorck_constraint::get(self, span, def.did);
Ok(ty::DtorckConstraint {
// FIXME: we can try to recursively `dtorck_constraint_on_ty`
// there, but that needs some way to handle cycles.
dtorck_types: dtorck_types.subst(self, substs),
outlives: outlives.subst(self, substs)
})
}
// Objects must be alive in order for their destructor
// to be called.
ty::TyDynamic(..) => Ok(ty::DtorckConstraint {
outlives: vec![Kind::from(ty)],
dtorck_types: vec![],
}),
// Types that can't be resolved. Pass them forward.
ty::TyProjection(..) | ty::TyAnon(..) | ty::TyParam(..) => {
Ok(ty::DtorckConstraint {
outlives: vec![],
dtorck_types: vec![ty],
})
}
ty::TyInfer(..) | ty::TyError => {
self.sess.delay_span_bug(span, "unresolved type in dtorck");
Err(ErrorReported)
}
};
debug!("dtorck_constraint_for_ty({:?}) = {:?}", ty, result);
result
}
pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
let mut def_id = def_id;
while self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr {
def_id = self.parent_def_id(def_id).unwrap_or_else(|| {
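`dtorck_constraint_for_ty` is a structural fold over the type: scalars, references, raw pointers, and function types contribute nothing; arrays, slices, tuples, and closures recurse into their components; ADTs consult the cached `adt_dtorck_constraint` query and substitute; unresolved params and projections are forwarded in `dtorck_types`; trait objects are recorded in `outlives`. A self-contained toy model of that shape (toy `Ty` and `Constraint` types, not the compiler's):

// Toy model of the structural recursion in dtorck_constraint_for_ty.
#[derive(Debug)]
enum Ty {
    Scalar,                    // bool, ints, &T, *T, fn ptrs: never need dropck
    Slice(Box<Ty>),            // arrays/slices behave like their element
    Tuple(Vec<Ty>),            // tuples combine the constraints of their fields
    Param(&'static str),       // unresolved: forwarded in `dtorck_types`
    TraitObject(&'static str), // must stay alive: recorded in `outlives`
}

#[derive(Default, Debug)]
struct Constraint {
    outlives: Vec<&'static str>,
    dtorck_types: Vec<&'static str>,
}

fn constraint_for(ty: &Ty, out: &mut Constraint) {
    match ty {
        Ty::Scalar => {}
        Ty::Slice(elem) => constraint_for(elem, out),
        Ty::Tuple(elems) => elems.iter().for_each(|e| constraint_for(e, out)),
        Ty::Param(name) => out.dtorck_types.push(*name),
        Ty::TraitObject(name) => out.outlives.push(*name),
    }
}

fn main() {
    let ty = Ty::Tuple(vec![
        Ty::Scalar,
        Ty::Slice(Box::new(Ty::Param("T"))),
        Ty::TraitObject("dyn Any"),
    ]);
    let mut c = Constraint::default();
    constraint_for(&ty, &mut c);
    assert_eq!(c.dtorck_types, ["T"]);
    assert_eq!(c.outlives, ["dyn Any"]);
}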

View File

@ -15,12 +15,11 @@ use middle::free_region::FreeRegionMap;
use rustc::infer::{self, InferOk};
use middle::region;
use rustc::ty::subst::{Subst, Substs};
use rustc::ty::{self, AdtKind, Ty, TyCtxt};
use rustc::ty::{self, Ty, TyCtxt};
use rustc::traits::{self, ObligationCause, Reveal};
use util::common::ErrorReported;
use util::nodemap::FxHashSet;
use syntax::ast;
use syntax_pos::Span;
/// check_drop_impl confirms that the Drop implementation identified by
@ -270,389 +269,64 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>(
///
pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>(
rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>,
typ: ty::Ty<'tcx>,
ty: ty::Ty<'tcx>,
span: Span,
scope: region::CodeExtent)
-> Result<(), ErrorReported>
{
debug!("check_safety_of_destructor_if_necessary typ: {:?} scope: {:?}",
typ, scope);
ty, scope);
let parent_scope = match rcx.tcx.region_maps.opt_encl_scope(scope) {
Some(parent_scope) => parent_scope,
// If no enclosing scope, then it must be the root scope which cannot be outlived.
None => return
Some(parent_scope) => parent_scope,
// If no enclosing scope, then it must be the root scope
// which cannot be outlived.
None => return Ok(())
};
let parent_scope = rcx.tcx.mk_region(ty::ReScope(parent_scope));
let origin = || infer::SubregionOrigin::SafeDestructor(span);
let result = iterate_over_potentially_unsafe_regions_in_type(
&mut DropckContext {
rcx: rcx,
span: span,
parent_scope: parent_scope,
breadcrumbs: FxHashSet()
},
TypeContext::Root,
typ,
0);
match result {
Ok(()) => {}
Err(Error::Overflow(ref ctxt, ref detected_on_typ)) => {
let tcx = rcx.tcx;
let mut err = struct_span_err!(tcx.sess, span, E0320,
"overflow while adding drop-check rules for {}", typ);
match *ctxt {
TypeContext::Root => {
// no need for an additional note if the overflow
// was somehow on the root.
let ty = rcx.fcx.resolve_type_vars_if_possible(&ty);
let for_ty = ty;
let mut types = vec![(ty, 0)];
let mut known = FxHashSet();
while let Some((ty, depth)) = types.pop() {
let ty::DtorckConstraint {
dtorck_types, outlives
} = rcx.tcx.dtorck_constraint_for_ty(span, for_ty, depth, ty)?;
for ty in dtorck_types {
let ty = rcx.fcx.normalize_associated_types_in(span, &ty);
let ty = rcx.fcx.resolve_type_vars_with_obligations(ty);
let ty = rcx.fcx.resolve_type_and_region_vars_if_possible(&ty);
match ty.sty {
// All parameters live for the duration of the
// function.
ty::TyParam(..) => {}
// A projection that we couldn't resolve - it
// might have a destructor.
ty::TyProjection(..) | ty::TyAnon(..) => {
rcx.type_must_outlive(origin(), ty, parent_scope);
}
TypeContext::ADT { def_id, variant, field } => {
let adt = tcx.lookup_adt_def(def_id);
let variant_name = match adt.adt_kind() {
AdtKind::Enum => format!("enum {} variant {}",
tcx.item_path_str(def_id),
variant),
AdtKind::Struct => format!("struct {}",
tcx.item_path_str(def_id)),
AdtKind::Union => format!("union {}",
tcx.item_path_str(def_id)),
};
span_note!(
&mut err,
span,
"overflowed on {} field {} type: {}",
variant_name,
field,
detected_on_typ);
_ => {
if let None = known.replace(ty) {
types.push((ty, depth+1));
}
}
}
err.emit();
}
}
}
enum Error<'tcx> {
Overflow(TypeContext, ty::Ty<'tcx>),
}
#[derive(Copy, Clone)]
enum TypeContext {
Root,
ADT {
def_id: DefId,
variant: ast::Name,
field: ast::Name,
}
}
struct DropckContext<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> {
rcx: &'a mut RegionCtxt<'b, 'gcx, 'tcx>,
/// types that have already been traversed
breadcrumbs: FxHashSet<Ty<'tcx>>,
/// span for error reporting
span: Span,
/// the scope reachable dtorck types must outlive
parent_scope: region::CodeExtent
}
// `context` is used for reporting overflow errors
fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'gcx, 'tcx>(
cx: &mut DropckContext<'a, 'b, 'gcx, 'tcx>,
context: TypeContext,
ty: Ty<'tcx>,
depth: usize)
-> Result<(), Error<'tcx>>
{
let tcx = cx.rcx.tcx;
// Issue #22443: Watch out for overflow. While we are careful to
// handle regular types properly, non-regular ones cause problems.
let recursion_limit = tcx.sess.recursion_limit.get();
if depth / 4 >= recursion_limit {
// This can get into rather deep recursion, especially in the
// presence of things like Vec<T> -> Unique<T> -> PhantomData<T> -> T.
// use a higher recursion limit to avoid errors.
return Err(Error::Overflow(context, ty))
}
// canonicalize the regions in `ty` before inserting - infinitely many
// region variables can refer to the same region.
let ty = cx.rcx.resolve_type_and_region_vars_if_possible(&ty);
if !cx.breadcrumbs.insert(ty) {
debug!("iterate_over_potentially_unsafe_regions_in_type \
{}ty: {} scope: {:?} - cached",
(0..depth).map(|_| ' ').collect::<String>(),
ty, cx.parent_scope);
return Ok(()); // we already visited this type
}
debug!("iterate_over_potentially_unsafe_regions_in_type \
{}ty: {} scope: {:?}",
(0..depth).map(|_| ' ').collect::<String>(),
ty, cx.parent_scope);
// If `typ` has a destructor, then we must ensure that all
// borrowed data reachable via `typ` must outlive the parent
// of `scope`. This is handled below.
//
// However, there is an important special case: for any Drop
// impl that is tagged as "blind" to their parameters,
// we assume that data borrowed via such type parameters
// remains unreachable via that Drop impl.
//
// For example, consider:
//
// ```rust
// #[unsafe_destructor_blind_to_params]
// impl<T> Drop for Vec<T> { ... }
// ```
//
// which does have to be able to drop instances of `T`, but
// otherwise cannot read data from `T`.
//
// Of course, for the type expression passed in for any such
// unbounded type parameter `T`, we must resume the recursive
// analysis on `T` (since it would be ignored by
// type_must_outlive).
let dropck_kind = has_dtor_of_interest(tcx, ty);
debug!("iterate_over_potentially_unsafe_regions_in_type \
ty: {:?} dropck_kind: {:?}", ty, dropck_kind);
match dropck_kind {
DropckKind::NoBorrowedDataAccessedInMyDtor => {
// The maximally blind attribute.
}
DropckKind::BorrowedDataMustStrictlyOutliveSelf => {
cx.rcx.type_must_outlive(infer::SubregionOrigin::SafeDestructor(cx.span),
ty, tcx.mk_region(ty::ReScope(cx.parent_scope)));
return Ok(());
}
DropckKind::RevisedSelf(revised_ty) => {
cx.rcx.type_must_outlive(infer::SubregionOrigin::SafeDestructor(cx.span),
revised_ty, tcx.mk_region(ty::ReScope(cx.parent_scope)));
// Do not return early from this case; we want
// to recursively process the internal structure of Self
// (because even though the Drop for Self has been asserted
// safe, the types instantiated for the generics of Self
// may themselves carry dropck constraints.)
}
}
debug!("iterate_over_potentially_unsafe_regions_in_type \
{}ty: {} scope: {:?} - checking interior",
(0..depth).map(|_| ' ').collect::<String>(),
ty, cx.parent_scope);
// We still need to ensure all referenced data is safe.
match ty.sty {
ty::TyBool | ty::TyChar | ty::TyInt(_) | ty::TyUint(_) |
ty::TyFloat(_) | ty::TyStr | ty::TyNever => {
// primitive - definitely safe
Ok(())
}
ty::TyArray(ity, _) | ty::TySlice(ity) => {
// single-element containers, behave like their element
iterate_over_potentially_unsafe_regions_in_type(
cx, context, ity, depth+1)
}
ty::TyAdt(def, substs) if def.is_phantom_data() => {
// PhantomData<T> - behaves identically to T
let ity = substs.type_at(0);
iterate_over_potentially_unsafe_regions_in_type(
cx, context, ity, depth+1)
}
ty::TyAdt(def, substs) => {
let did = def.did;
for variant in &def.variants {
for field in variant.fields.iter() {
let fty = field.ty(tcx, substs);
let fty = cx.rcx.fcx.resolve_type_vars_with_obligations(
cx.rcx.fcx.normalize_associated_types_in(cx.span, &fty));
iterate_over_potentially_unsafe_regions_in_type(
cx,
TypeContext::ADT {
def_id: did,
field: field.name,
variant: variant.name,
},
fty,
depth+1)?
}
for outlive in outlives {
if let Some(r) = outlive.as_region() {
rcx.sub_regions(origin(), parent_scope, r);
} else if let Some(ty) = outlive.as_type() {
rcx.type_must_outlive(origin(), ty, parent_scope);
}
Ok(())
}
ty::TyClosure(def_id, substs) => {
for ty in substs.upvar_tys(def_id, tcx) {
iterate_over_potentially_unsafe_regions_in_type(cx, context, ty, depth+1)?
}
Ok(())
}
ty::TyTuple(tys, _) => {
for ty in tys {
iterate_over_potentially_unsafe_regions_in_type(cx, context, ty, depth+1)?
}
Ok(())
}
ty::TyRawPtr(..) | ty::TyRef(..) | ty::TyParam(..) => {
// these always come with a witness of liveness (references
// explicitly, pointers implicitly, parameters by the
// caller).
Ok(())
}
ty::TyFnDef(..) | ty::TyFnPtr(_) => {
// FIXME(#26656): this type is always destruction-safe, but
// it implicitly witnesses Self: Fn, which can be false.
Ok(())
}
ty::TyInfer(..) | ty::TyError => {
tcx.sess.delay_span_bug(cx.span, "unresolved type in regionck");
Ok(())
}
// these are always dtorck
ty::TyDynamic(..) | ty::TyProjection(_) | ty::TyAnon(..) => bug!(),
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum DropckKind<'tcx> {
/// The "safe" kind; i.e. conservatively assume any borrow
/// accessed by dtor, and therefore such data must strictly
/// outlive self.
///
/// Equivalent to RevisedTy with no change to the self type.
BorrowedDataMustStrictlyOutliveSelf,
/// The nearly completely-unsafe kind.
///
/// Equivalent to RevisedSelf with *all* parameters remapped to ()
/// (maybe...?)
NoBorrowedDataAccessedInMyDtor,
/// Assume all borrowed data access by dtor occurs as if Self has the
/// type carried by this variant. In practice this means that some
/// of the type parameters are remapped to `()` (and some lifetime
/// parameters remapped to `'static`), because the developer has asserted
/// that the destructor will not access their contents.
RevisedSelf(Ty<'tcx>),
}
/// Returns the classification of what kind of check should be applied
/// to `ty`, which may include a revised type where some of the type
/// parameters are re-mapped to `()` to reflect the destructor's
/// "purity" with respect to their actual contents.
fn has_dtor_of_interest<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
ty: Ty<'tcx>)
-> DropckKind<'tcx> {
match ty.sty {
ty::TyAdt(adt_def, substs) => {
if !adt_def.is_dtorck(tcx) {
return DropckKind::NoBorrowedDataAccessedInMyDtor;
}
// Find the `impl<..> Drop for _` to inspect any
// attributes attached to the impl's generics.
let dtor_method = adt_def.destructor(tcx)
.expect("dtorck type without destructor impossible");
let method = tcx.associated_item(dtor_method.did);
let impl_def_id = method.container.id();
let revised_ty = revise_self_ty(tcx, adt_def, impl_def_id, substs);
return DropckKind::RevisedSelf(revised_ty);
}
ty::TyDynamic(..) | ty::TyProjection(..) | ty::TyAnon(..) => {
debug!("ty: {:?} isn't known, and therefore is a dropck type", ty);
return DropckKind::BorrowedDataMustStrictlyOutliveSelf;
},
_ => {
return DropckKind::NoBorrowedDataAccessedInMyDtor;
}
}
}
// Constructs new Ty just like the type defined by `adt_def` coupled
// with `substs`, except each type and lifetime parameter marked as
// `#[may_dangle]` in the Drop impl (identified by `impl_def_id`) is
// respectively mapped to `()` or `'static`.
//
// For example: If the `adt_def` maps to:
//
// enum Foo<'a, X, Y> { ... }
//
// and the `impl_def_id` maps to:
//
// impl<#[may_dangle] 'a, X, #[may_dangle] Y> Drop for Foo<'a, X, Y> { ... }
//
// then revises input: `Foo<'r,i64,&'r i64>` to: `Foo<'static,i64,()>`
fn revise_self_ty<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
adt_def: &'tcx ty::AdtDef,
impl_def_id: DefId,
substs: &Substs<'tcx>)
-> Ty<'tcx> {
// Get generics for `impl Drop` to query for `#[may_dangle]` attr.
let impl_bindings = tcx.item_generics(impl_def_id);
// Get Substs attached to Self on `impl Drop`; process in parallel
// with `substs`, replacing dangling entries as appropriate.
let self_substs = {
let impl_self_ty: Ty<'tcx> = tcx.item_type(impl_def_id);
if let ty::TyAdt(self_adt_def, self_substs) = impl_self_ty.sty {
assert_eq!(adt_def, self_adt_def);
self_substs
} else {
bug!("Self in `impl Drop for _` must be an Adt.");
}
};
// Walk `substs` + `self_substs`, build new substs appropriate for
// `adt_def`; each non-dangling param reuses entry from `substs`.
//
// Note: The manner we map from a right-hand side (i.e. Region or
// Ty) for a given `def` to generic parameter associated with that
// right-hand side is tightly coupled to `Drop` impl constraints.
//
// E.g. we know such a Ty must be `TyParam`, because a destructor
// for `struct Foo<X>` is defined via `impl<Y> Drop for Foo<Y>`,
// and never by (for example) `impl<Z> Drop for Foo<Vec<Z>>`.
let substs = Substs::for_item(
tcx,
adt_def.did,
|def, _| {
let r_orig = substs.region_for_def(def);
let impl_self_orig = self_substs.region_for_def(def);
let r = if let ty::Region::ReEarlyBound(ref ebr) = *impl_self_orig {
if impl_bindings.region_param(ebr).pure_wrt_drop {
tcx.types.re_static
} else {
r_orig
}
} else {
bug!("substs for an impl must map regions to ReEarlyBound");
};
debug!("has_dtor_of_interest mapping def {:?} orig {:?} to {:?}",
def, r_orig, r);
r
},
|def, _| {
let t_orig = substs.type_for_def(def);
let impl_self_orig = self_substs.type_for_def(def);
let t = if let ty::TypeVariants::TyParam(ref pt) = impl_self_orig.sty {
if impl_bindings.type_param(pt).pure_wrt_drop {
tcx.mk_nil()
} else {
t_orig
}
} else {
bug!("substs for an impl must map types to TyParam");
};
debug!("has_dtor_of_interest mapping def {:?} orig {:?} {:?} to {:?} {:?}",
def, t_orig, t_orig.sty, t, t.sty);
t
});
tcx.mk_adt(adt_def, &substs)
Ok(())
}
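The rewritten check above replaces the old recursive traversal with a worklist: it pops a `(ty, depth)` pair, fetches that type's dtorck constraint, registers the `outlives` obligations, and pushes only the dtorck types it has not seen before. A minimal sketch of that pattern over a toy graph (nothing here is a rustc API):

use std::collections::HashSet;

// Minimal worklist sketch: process each node once, pushing only unseen
// successors; `depth` stands in for the recursion-limit check (E0320).
fn visit_all(start: u32, successors: impl Fn(u32) -> Vec<u32>) -> Vec<u32> {
    let mut stack = vec![(start, 0usize)];
    let mut known = HashSet::new();
    let mut order = Vec::new();
    known.insert(start);
    while let Some((node, depth)) = stack.pop() {
        order.push(node);
        for next in successors(node) {
            // Only enqueue nodes we have not visited yet.
            if known.insert(next) {
                stack.push((next, depth + 1));
            }
        }
    }
    order
}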

View File

@ -457,7 +457,8 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
self.type_of_node_must_outlive(origin, id, var_region);
let typ = self.resolve_node_type(id);
dropck::check_safety_of_destructor_if_necessary(self, typ, span, var_scope);
let _ = dropck::check_safety_of_destructor_if_necessary(
self, typ, span, var_scope);
})
}
}
@ -995,10 +996,8 @@ impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
match *region {
ty::ReScope(rvalue_scope) => {
let typ = self.resolve_type(cmt.ty);
dropck::check_safety_of_destructor_if_necessary(self,
typ,
span,
rvalue_scope);
let _ = dropck::check_safety_of_destructor_if_necessary(
self, typ, span, rvalue_scope);
}
ty::ReStatic => {}
_ => {

View File

@ -4154,7 +4154,6 @@ register_diagnostics! {
// E0248, // value used as a type, now reported earlier during resolution as E0412
// E0249,
// E0319, // trait impls for defaulted traits allowed just for structs/enums
E0320, // recursive overflow during dropck
// E0372, // coherence not object safe
E0377, // the trait `CoerceUnsized` may only be implemented for a coercion
// between structures with the same definition