Auto merge of #21972 - pnkfelix:new-dtor-semantics-6, r=nikomatsakis

This is a resurrection and heavy revision/expansion of a PR that pcwalton did to resolve #8861.

The most relevant, user-visible semantic change is this: #[unsafe_destructor] is gone. Instead, if a type expression for some value has a destructor, then any lifetimes referenced within that type expression must strictly outlive the scope of the value.

See discussion on https://github.com/rust-lang/rfcs/pull/769
This commit is contained in:
bors 2015-02-11 17:59:37 +00:00
commit e29f420255
56 changed files with 1863 additions and 136 deletions

View File

@ -42,6 +42,7 @@ use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics::{TyDesc, get_tydesc};
use std::intrinsics;
use std::marker;
use std::mem;
use std::num::{Int, UnsignedInt};
use std::ptr;
@ -88,27 +89,29 @@ impl Chunk {
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
pub struct Arena {
pub struct Arena<'longer_than_self> {
// The head is separated out from the list as an unbenchmarked
// microoptimization, to avoid needing to case on the list to access the
// head.
head: RefCell<Chunk>,
copy_head: RefCell<Chunk>,
chunks: RefCell<Vec<Chunk>>,
_invariant: marker::InvariantLifetime<'longer_than_self>,
}
impl Arena {
impl<'a> Arena<'a> {
/// Allocates a new Arena with 32 bytes preallocated.
pub fn new() -> Arena {
pub fn new() -> Arena<'a> {
Arena::new_with_size(32)
}
/// Allocates a new Arena with `initial_size` bytes preallocated.
pub fn new_with_size(initial_size: usize) -> Arena {
pub fn new_with_size(initial_size: usize) -> Arena<'a> {
Arena {
head: RefCell::new(chunk(initial_size, false)),
copy_head: RefCell::new(chunk(initial_size, true)),
chunks: RefCell::new(Vec::new()),
_invariant: marker::InvariantLifetime,
}
}
}
@ -122,7 +125,7 @@ fn chunk(size: usize, is_copy: bool) -> Chunk {
}
#[unsafe_destructor]
impl Drop for Arena {
impl<'longer_than_self> Drop for Arena<'longer_than_self> {
fn drop(&mut self) {
unsafe {
destroy_chunk(&*self.head.borrow());
@ -180,7 +183,7 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
((p & !1) as *const TyDesc, p & 1 == 1)
}
impl Arena {
impl<'longer_than_self> Arena<'longer_than_self> {
fn chunk_size(&self) -> usize {
self.copy_head.borrow().capacity()
}
@ -293,7 +296,7 @@ impl Arena {
/// Allocates a new item in the arena, using `op` to initialize the value,
/// and returns a reference to it.
#[inline]
pub fn alloc<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
unsafe {
if intrinsics::needs_drop::<T>() {
self.alloc_noncopy(op)
@ -317,20 +320,6 @@ fn test_arena_destructors() {
}
}
#[test]
fn test_arena_alloc_nested() {
struct Inner { value: usize }
struct Outer<'a> { inner: &'a Inner }
let arena = Arena::new();
let result = arena.alloc(|| Outer {
inner: arena.alloc(|| Inner { value: 10 })
});
assert_eq!(result.inner.value, 10);
}
#[test]
#[should_fail]
fn test_arena_destructors_fail() {
@ -365,6 +354,10 @@ pub struct TypedArena<T> {
/// A pointer to the first arena segment.
first: RefCell<*mut TypedArenaChunk<T>>,
/// Marker indicating that dropping the arena causes its owned
/// instances of `T` to be dropped.
_own: marker::PhantomData<T>,
}
struct TypedArenaChunk<T> {
@ -460,6 +453,7 @@ impl<T> TypedArena<T> {
ptr: Cell::new((*chunk).start() as *const T),
end: Cell::new((*chunk).end() as *const T),
first: RefCell::new(chunk),
_own: marker::PhantomData,
}
}
}
@ -523,6 +517,41 @@ mod tests {
z: i32,
}
// Checks that allocations from a `TypedArena` may reference other
// allocations from the same arena: an `Outer` allocated in the arena
// borrows an `Inner` that was also allocated in it.
//
// The single enum `EI` lets one arena own values of both shapes, and
// the `Wrap` newtype provides typed accessors that unwrap the variant
// (panicking on a variant mismatch, which would indicate a test bug).
#[test]
fn test_arena_alloc_nested() {
struct Inner { value: u8 }
struct Outer<'a> { inner: &'a Inner }
// One enum so a single `TypedArena` can hold either kind of value.
enum EI<'e> { I(Inner), O(Outer<'e>) }

struct Wrap<'a>(TypedArena<EI<'a>>);

impl<'a> Wrap<'a> {
// Allocates an `Inner` in the arena and returns a reference into it.
fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
let r: &EI = self.0.alloc(EI::I(f()));
if let &EI::I(ref i) = r {
i
} else {
// Unreachable unless `alloc` returned the wrong variant.
panic!("mismatch");
}
}
// Allocates an `Outer` in the arena and returns a reference into it.
fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
let r: &EI = self.0.alloc(EI::O(f()));
if let &EI::O(ref o) = r {
o
} else {
// Unreachable unless `alloc` returned the wrong variant.
panic!("mismatch");
}
}
}

// The outer value borrows an inner value from the same arena.
let arena = Wrap(TypedArena::new());
let result = arena.alloc_outer(|| Outer {
inner: arena.alloc_inner(|| Inner { value: 10 }) });
assert_eq!(result.inner.value, 10);
}
#[test]
pub fn test_copy() {
let arena = TypedArena::new();

View File

@ -278,7 +278,7 @@ impl<T> Drop for RawItems<T> {
#[unsafe_destructor]
impl<K, V> Drop for Node<K, V> {
fn drop(&mut self) {
if self.keys.0.is_null() {
if self.keys.ptr.is_null() {
// We have already cleaned up this node.
return;
}
@ -292,7 +292,7 @@ impl<K, V> Drop for Node<K, V> {
self.destroy();
}
self.keys.0 = ptr::null_mut();
self.keys.ptr = ptr::null_mut();
}
}
@ -337,18 +337,18 @@ impl<K, V> Node<K, V> {
unsafe fn destroy(&mut self) {
let (alignment, size) =
calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
heap::deallocate(self.keys.0 as *mut u8, size, alignment);
heap::deallocate(self.keys.ptr as *mut u8, size, alignment);
}
#[inline]
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
unsafe {(
mem::transmute(raw::Slice {
data: self.keys.0,
data: self.keys.ptr,
len: self.len()
}),
mem::transmute(raw::Slice {
data: self.vals.0,
data: self.vals.ptr,
len: self.len()
})
)}
@ -368,7 +368,7 @@ impl<K, V> Node<K, V> {
} else {
unsafe {
mem::transmute(raw::Slice {
data: self.edges.0,
data: self.edges.ptr,
len: self.len() + 1
})
}
@ -586,7 +586,7 @@ impl <K, V> Node<K, V> {
/// If the node has any children
pub fn is_leaf(&self) -> bool {
self.edges.0.is_null()
self.edges.ptr.is_null()
}
/// if the node has too few elements
@ -1064,7 +1064,7 @@ impl<K, V> Node<K, V> {
vals: RawItems::from_slice(self.vals()),
edges: RawItems::from_slice(self.edges()),
ptr: self.keys.0 as *mut u8,
ptr: self.keys.ptr as *mut u8,
capacity: self.capacity(),
is_leaf: self.is_leaf()
},

View File

@ -57,7 +57,7 @@ use core::default::Default;
use core::fmt;
use core::hash::{self, Hash};
use core::iter::{repeat, FromIterator, IntoIterator};
use core::marker::{ContravariantLifetime, InvariantType};
use core::marker::{self, ContravariantLifetime, InvariantType};
use core::mem;
use core::nonzero::NonZero;
use core::num::{Int, UnsignedInt};
@ -140,6 +140,7 @@ pub struct Vec<T> {
ptr: NonZero<*mut T>,
len: usize,
cap: usize,
_own: marker::PhantomData<T>,
}
unsafe impl<T: Send> Send for Vec<T> { }
@ -166,7 +167,7 @@ impl<T> Vec<T> {
// non-null value which is fine since we never call deallocate on the ptr
// if cap is 0. The reason for this is because the pointer of a slice
// being NULL would break the null pointer optimization for enums.
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, 0) }
}
/// Constructs a new, empty `Vec<T>` with the specified capacity.
@ -198,7 +199,7 @@ impl<T> Vec<T> {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> Vec<T> {
if mem::size_of::<T>() == 0 {
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: usize::MAX }
unsafe { Vec::from_raw_parts(EMPTY as *mut T, 0, usize::MAX) }
} else if capacity == 0 {
Vec::new()
} else {
@ -206,7 +207,7 @@ impl<T> Vec<T> {
.expect("capacity overflow");
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
if ptr.is_null() { ::alloc::oom() }
Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
}
}
@ -247,7 +248,12 @@ impl<T> Vec<T> {
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
capacity: usize) -> Vec<T> {
Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
Vec {
ptr: NonZero::new(ptr),
len: length,
cap: capacity,
_own: marker::PhantomData,
}
}
/// Creates a vector by copying the elements from a raw pointer.
@ -1626,7 +1632,7 @@ impl<T> IntoIter<T> {
for _x in self.by_ref() { }
let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
mem::forget(self);
Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
Vec::from_raw_parts(allocation, 0, cap)
}
}
}

View File

@ -202,6 +202,24 @@ pub unsafe trait Sync {
// Empty
}
/// A marker type that indicates to the compiler that instances of the
/// type itself own instances of the type parameter `T`.
///
/// This is used to indicate that one or more instances of the type
/// `T` could be dropped when instances of the type itself are dropped,
/// though that may not be apparent from the other structure of the
/// type itself. For example, the type may hold a `*mut T`, which the
/// compiler does not automatically treat as owned.
#[unstable(feature = "core",
reason = "Newly added to deal with scoping and destructor changes")]
#[lang="phantom_data"]
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct PhantomData<T: ?Sized>;

// `Copy`/`Clone` are implemented by hand rather than derived so that
// `PhantomData<T>` is copyable regardless of whether `T` itself is
// `Copy`/`Clone` (the derives would add a `T: Copy`/`T: Clone` bound).
impl<T: ?Sized> Copy for PhantomData<T> {}
impl<T: ?Sized> Clone for PhantomData<T> {
// `PhantomData` is a zero-sized unit struct, so cloning is a copy.
fn clone(&self) -> PhantomData<T> { *self }
}
/// A marker type whose type parameter `T` is considered to be
/// covariant with respect to the type itself. This is (typically)

View File

@ -92,7 +92,7 @@ use mem;
use clone::Clone;
use intrinsics;
use option::Option::{self, Some, None};
use marker::{Send, Sized, Sync};
use marker::{self, Send, Sized, Sync};
use cmp::{PartialEq, Eq, Ord, PartialOrd};
use cmp::Ordering::{self, Less, Equal, Greater};
@ -522,7 +522,11 @@ impl<T> PartialOrd for *mut T {
/// Useful for building abstractions like `Vec<T>` or `Box<T>`, which
/// internally use raw pointers to manage the memory that they own.
#[unstable(feature = "core", reason = "recently added to this module")]
pub struct Unique<T: ?Sized>(pub *mut T);
pub struct Unique<T: ?Sized> {
/// The wrapped `*mut T`.
pub ptr: *mut T,
_own: marker::PhantomData<T>,
}
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
@ -550,6 +554,13 @@ impl<T> Unique<T> {
#[unstable(feature = "core",
reason = "recently added to this module")]
pub unsafe fn offset(self, offset: int) -> *mut T {
self.0.offset(offset)
self.ptr.offset(offset)
}
}
/// Creates a `Unique` wrapped around `ptr`, taking ownership of the
/// data referenced by `ptr`.
// The capitalized function name deliberately mimics the tuple-struct
// constructor `Unique(ptr)` that this replaced, hence the lint allow.
#[allow(non_snake_case)]
pub fn Unique<T: ?Sized>(ptr: *mut T) -> Unique<T> {
// `_own: PhantomData` records (for dropck) that the `Unique` owns a `T`.
Unique { ptr: ptr, _own: marker::PhantomData }
}

View File

@ -172,7 +172,7 @@ fn test_set_memory() {
fn test_unsized_unique() {
let xs: &mut [_] = &mut [1, 2, 3];
let ptr = Unique(xs as *mut [_]);
let ys = unsafe { &mut *ptr.0 };
let ys = unsafe { &mut *ptr.ptr };
let zs: &mut [_] = &mut [1, 2, 3];
assert!(ys == zs);
}

View File

@ -45,13 +45,13 @@ pub struct Bytes {
impl Deref for Bytes {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe { slice::from_raw_parts_mut(self.ptr.0, self.len) }
unsafe { slice::from_raw_parts_mut(self.ptr.ptr, self.len) }
}
}
impl Drop for Bytes {
fn drop(&mut self) {
unsafe { libc::free(self.ptr.0 as *mut _); }
unsafe { libc::free(self.ptr.ptr as *mut _); }
}
}

View File

@ -349,7 +349,7 @@ fn parse_region_<F>(st: &mut PState, conv: &mut F) -> ty::Region where
}
'f' => {
assert_eq!(next(st), '[');
let scope = parse_scope(st);
let scope = parse_destruction_scope_data(st);
assert_eq!(next(st), '|');
let br = parse_bound_region_(st, conv);
assert_eq!(next(st), ']');
@ -377,6 +377,10 @@ fn parse_scope(st: &mut PState) -> region::CodeExtent {
let node_id = parse_uint(st) as ast::NodeId;
region::CodeExtent::Misc(node_id)
}
'D' => {
let node_id = parse_uint(st) as ast::NodeId;
region::CodeExtent::DestructionScope(node_id)
}
'B' => {
let node_id = parse_uint(st) as ast::NodeId;
let first_stmt_index = parse_uint(st);
@ -389,6 +393,11 @@ fn parse_scope(st: &mut PState) -> region::CodeExtent {
}
}
/// Decodes a `DestructionScopeData` from the metadata stream: a single
/// unsigned integer interpreted as the scope's AST node id (the inverse
/// of `enc_destruction_scope_data`).
fn parse_destruction_scope_data(st: &mut PState) -> region::DestructionScopeData {
let node_id = parse_uint(st) as ast::NodeId;
region::DestructionScopeData::new(node_id)
}
fn parse_opt<'a, 'tcx, T, F>(st: &mut PState<'a, 'tcx>, f: F) -> Option<T> where
F: FnOnce(&mut PState<'a, 'tcx>) -> T,
{

View File

@ -251,7 +251,7 @@ pub fn enc_region(w: &mut SeekableMemWriter, cx: &ctxt, r: ty::Region) {
}
ty::ReFree(ref fr) => {
mywrite!(w, "f[");
enc_scope(w, cx, fr.scope);
enc_destruction_scope_data(w, fr.scope);
mywrite!(w, "|");
enc_bound_region(w, cx, fr.bound_region);
mywrite!(w, "]");
@ -279,9 +279,15 @@ fn enc_scope(w: &mut SeekableMemWriter, _cx: &ctxt, scope: region::CodeExtent) {
region::CodeExtent::Misc(node_id) => mywrite!(w, "M{}", node_id),
region::CodeExtent::Remainder(region::BlockRemainder {
block: b, first_statement_index: i }) => mywrite!(w, "B{}{}", b, i),
region::CodeExtent::DestructionScope(node_id) => mywrite!(w, "D{}", node_id),
}
}
/// Encodes a `DestructionScopeData` into the metadata stream as its
/// bare node id (decoded by `parse_destruction_scope_data`).
fn enc_destruction_scope_data(w: &mut SeekableMemWriter,
d: region::DestructionScopeData) {
mywrite!(w, "{}", d.node_id);
}
fn enc_bound_region(w: &mut SeekableMemWriter, cx: &ctxt, br: ty::BoundRegion) {
match br {
ty::BrAnon(idx) => {

View File

@ -499,6 +499,12 @@ impl tr for region::CodeExtent {
}
}
// Translates a `DestructionScopeData` across crates during inlining by
// remapping its node id into the local crate's id space.
impl tr for region::DestructionScopeData {
fn tr(&self, dcx: &DecodeContext) -> region::DestructionScopeData {
region::DestructionScopeData { node_id: dcx.tr_id(self.node_id) }
}
}
impl tr for ty::BoundRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::BoundRegion {
match *self {

View File

@ -242,7 +242,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
}
}
SubSupConflict(var_origin, _, sub_r, _, sup_r) => {
debug!("processing SubSupConflict");
debug!("processing SubSupConflict sub: {:?} sup: {:?}", sub_r, sup_r);
match free_regions_from_same_fn(self.tcx, sub_r, sup_r) {
Some(ref same_frs) => {
var_origins.push(var_origin);
@ -304,7 +304,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
return None
}
assert!(fr1.scope == fr2.scope);
(fr1.scope.node_id(), fr1, fr2)
(fr1.scope.node_id, fr1, fr2)
},
_ => return None
};
@ -709,6 +709,23 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
sup,
"");
}
infer::SafeDestructor(span) => {
self.tcx.sess.span_err(
span,
"unsafe use of destructor: destructor might be called \
while references are dead");
// FIXME (22171): terms "super/subregion" are suboptimal
note_and_explain_region(
self.tcx,
"superregion: ",
sup,
"");
note_and_explain_region(
self.tcx,
"subregion: ",
sub,
"");
}
infer::BindingTypeIsNotValidAtDecl(span) => {
self.tcx.sess.span_err(
span,
@ -1629,6 +1646,12 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
&format!("...so that the declared lifetime parameter bounds \
are satisfied")[]);
}
infer::SafeDestructor(span) => {
self.tcx.sess.span_note(
span,
"...so that references are valid when the destructor \
runs")
}
}
}
}

View File

@ -215,6 +215,9 @@ pub enum SubregionOrigin<'tcx> {
// An auto-borrow that does not enclose the expr where it occurs
AutoBorrow(Span),
// Region constraint arriving from destructor safety
SafeDestructor(Span),
}
/// Times when we replace late-bound regions with variables:
@ -1197,6 +1200,7 @@ impl<'tcx> SubregionOrigin<'tcx> {
CallReturn(a) => a,
AddrOf(a) => a,
AutoBorrow(a) => a,
SafeDestructor(a) => a,
}
}
}
@ -1259,6 +1263,7 @@ impl<'tcx> Repr<'tcx> for SubregionOrigin<'tcx> {
CallReturn(a) => format!("CallReturn({})", a.repr(tcx)),
AddrOf(a) => format!("AddrOf({})", a.repr(tcx)),
AutoBorrow(a) => format!("AutoBorrow({})", a.repr(tcx)),
SafeDestructor(a) => format!("SafeDestructor({})", a.repr(tcx)),
}
}
}

View File

@ -760,11 +760,12 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
// A "free" region can be interpreted as "some region
// at least as big as the block fr.scope_id". So, we can
// reasonably compare free regions and scopes:
match self.tcx.region_maps.nearest_common_ancestor(fr.scope, s_id) {
let fr_scope = fr.scope.to_code_extent();
match self.tcx.region_maps.nearest_common_ancestor(fr_scope, s_id) {
// if the free region's scope `fr.scope_id` is bigger than
// the scope region `s_id`, then the LUB is the free
// region itself:
Some(r_id) if r_id == fr.scope => f,
Some(r_id) if r_id == fr_scope => f,
// otherwise, we don't know what the free region is,
// so we must conservatively say the LUB is static:
@ -865,8 +866,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
// than the scope `s_id`, then we can say that the GLB
// is the scope `s_id`. Otherwise, as we do not know
// big the free region is precisely, the GLB is undefined.
match self.tcx.region_maps.nearest_common_ancestor(fr.scope, s_id) {
Some(r_id) if r_id == fr.scope => Ok(s),
let fr_scope = fr.scope.to_code_extent();
match self.tcx.region_maps.nearest_common_ancestor(fr_scope, s_id) {
Some(r_id) if r_id == fr_scope => Ok(s),
_ => Err(ty::terr_regions_no_overlap(b, a))
}
}
@ -915,7 +917,8 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
Ok(ty::ReFree(*b))
} else {
this.intersect_scopes(ty::ReFree(*a), ty::ReFree(*b),
a.scope, b.scope)
a.scope.to_code_extent(),
b.scope.to_code_extent())
}
}
}
@ -1396,6 +1399,8 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
for upper_bound in &upper_bounds {
if !self.is_subregion_of(lower_bound.region,
upper_bound.region) {
debug!("pushing SubSupConflict sub: {:?} sup: {:?}",
lower_bound.region, upper_bound.region);
errors.push(SubSupConflict(
(*self.var_origins.borrow())[node_idx.index as uint].clone(),
lower_bound.origin.clone(),

View File

@ -312,6 +312,8 @@ lets_do_this! {
ExchangeHeapLangItem, "exchange_heap", exchange_heap;
OwnedBoxLangItem, "owned_box", owned_box;
PhantomDataItem, "phantom_data", phantom_data;
CovariantTypeItem, "covariant_type", covariant_type;
ContravariantTypeItem, "contravariant_type", contravariant_type;
InvariantTypeItem, "invariant_type", invariant_type;

View File

@ -112,7 +112,7 @@ use self::VarKind::*;
use middle::def::*;
use middle::mem_categorization::Typer;
use middle::pat_util;
use middle::region::CodeExtent;
use middle::region;
use middle::ty;
use middle::ty::ClosureTyper;
use lint;
@ -1514,7 +1514,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
let fn_ret =
ty::liberate_late_bound_regions(
self.ir.tcx,
CodeExtent::from_node_id(body.id),
region::DestructionScopeData::new(body.id),
&self.fn_ret(id));
match fn_ret {

View File

@ -760,7 +760,10 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
// Region of environment pointer
let env_region = ty::ReFree(ty::FreeRegion {
scope: region::CodeExtent::from_node_id(fn_body_id),
// The environment of a closure is guaranteed to
// outlive any bindings introduced in the body of the
// closure itself.
scope: region::DestructionScopeData::new(fn_body_id),
bound_region: ty::BrEnv
});

View File

@ -27,11 +27,66 @@ use syntax::{ast, visit};
use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local};
use syntax::ast_util::{stmt_id};
use syntax::ast_map;
use syntax::ptr::P;
use syntax::visit::{Visitor, FnKind};
/// CodeExtent represents a statically-describable extent that can be
/// used to bound the lifetime/region for values.
///
/// `Misc(node_id)`: Any AST node that has any extent at all has the
/// `Misc(node_id)` extent. Other variants represent special cases not
/// immediately derivable from the abstract syntax tree structure.
///
/// `DestructionScope(node_id)` represents the extent of destructors
/// implicitly-attached to `node_id` that run immediately after the
/// expression for `node_id` itself. Not every AST node carries a
/// `DestructionScope`, but those that are `terminating_scopes` do;
/// see discussion with `RegionMaps`.
///
/// `Remainder(BlockRemainder { block, statement_index })` represents
/// the extent of user code running immediately after the initializer
/// expression for the indexed statement, until the end of the block.
///
/// So: the following code can be broken down into the extents beneath:
/// ```
/// let a = f().g( 'b: { let x = d(); let y = d(); x.h(y) } ) ;
/// ```
///
/// +-+ (D12.)
/// +-+ (D11.)
/// +---------+ (R10.)
/// +-+ (D9.)
/// +----------+ (M8.)
/// +----------------------+ (R7.)
/// +-+ (D6.)
/// +----------+ (M5.)
/// +-----------------------------------+ (M4.)
/// +--------------------------------------------------+ (M3.)
/// +--+ (M2.)
/// +-----------------------------------------------------------+ (M1.)
///
/// (M1.): Misc extent of the whole `let a = ...;` statement.
/// (M2.): Misc extent of the `f()` expression.
/// (M3.): Misc extent of the `f().g(..)` expression.
/// (M4.): Misc extent of the block labelled `'b:`.
/// (M5.): Misc extent of the `let x = d();` statement
/// (D6.): DestructionScope for temporaries created during M5.
/// (R7.): Remainder extent for block `'b:`, stmt 0 (let x = ...).
/// (M8.): Misc Extent of the `let y = d();` statement.
/// (D9.): DestructionScope for temporaries created during M8.
/// (R10.): Remainder extent for block `'b:`, stmt 1 (let y = ...).
/// (D11.): DestructionScope for temporaries and bindings from block `'b:`.
/// (D12.): DestructionScope for temporaries created during M1 (e.g. f()).
///
/// Note that while the above picture shows the destruction scopes
/// as following their corresponding misc extents, in the internal
/// data structures of the compiler the destruction scopes are
/// represented as enclosing parents. This is sound because we use the
/// enclosing parent relationship just to ensure that referenced
/// values live long enough; phrased another way, the starting point
/// of each range is not really the important thing in the above
/// picture, but rather the ending point.
///
/// FIXME (pnkfelix): This currently derives `PartialOrd` and `Ord` to
/// placate the same deriving in `ty::FreeRegion`, but we may want to
/// actually attach a more meaningful ordering to scopes than the one
@ -40,7 +95,24 @@ use syntax::visit::{Visitor, FnKind};
RustcDecodable, Debug, Copy)]
pub enum CodeExtent {
Misc(ast::NodeId),
Remainder(BlockRemainder),
DestructionScope(ast::NodeId), // extent of destructors for temporaries of node-id
Remainder(BlockRemainder)
}
/// Extent of the destructors for the temporaries of a node-id: a
/// strongly-typed wrapper around the node whose destruction scope a
/// free region is tied to (see `CodeExtent::DestructionScope`).
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable,
RustcDecodable, Debug, Copy)]
pub struct DestructionScopeData {
pub node_id: ast::NodeId
}
impl DestructionScopeData {
/// Wraps `node_id` as a destruction-scope marker.
pub fn new(node_id: ast::NodeId) -> DestructionScopeData {
DestructionScopeData { node_id: node_id }
}
/// Converts this marker into the corresponding `CodeExtent` variant.
pub fn to_code_extent(&self) -> CodeExtent {
CodeExtent::DestructionScope(self.node_id)
}
}
/// Represents a subscope of `block` for a binding that is introduced
@ -82,6 +154,7 @@ impl CodeExtent {
match *self {
CodeExtent::Misc(node_id) => node_id,
CodeExtent::Remainder(br) => br.block,
CodeExtent::DestructionScope(node_id) => node_id,
}
}
@ -95,6 +168,8 @@ impl CodeExtent {
CodeExtent::Remainder(br) =>
CodeExtent::Remainder(BlockRemainder {
block: f_id(br.block), first_statement_index: br.first_statement_index }),
CodeExtent::DestructionScope(node_id) =>
CodeExtent::DestructionScope(f_id(node_id)),
}
}
@ -105,7 +180,8 @@ impl CodeExtent {
match ast_map.find(self.node_id()) {
Some(ast_map::NodeBlock(ref blk)) => {
match *self {
CodeExtent::Misc(_) => Some(blk.span),
CodeExtent::Misc(_) |
CodeExtent::DestructionScope(_) => Some(blk.span),
CodeExtent::Remainder(r) => {
assert_eq!(r.block, blk.id);
@ -455,7 +531,7 @@ impl RegionMaps {
}
(ty::ReScope(sub_scope), ty::ReFree(ref fr)) => {
self.is_subscope_of(sub_scope, fr.scope)
self.is_subscope_of(sub_scope, fr.scope.to_code_extent())
}
(ty::ReFree(sub_fr), ty::ReFree(super_fr)) => {
@ -567,7 +643,18 @@ fn resolve_block(visitor: &mut RegionResolutionVisitor, blk: &ast::Block) {
let prev_cx = visitor.cx;
let blk_scope = CodeExtent::Misc(blk.id);
record_superlifetime(visitor, blk_scope, blk.span);
// If block was previously marked as a terminating scope during
// the recursive visit of its parent node in the AST, then we need
// to account for the destruction scope representing the extent of
// the destructors that run immediately after the block itself
// completes.
if visitor.region_maps.terminating_scopes.borrow().contains(&blk_scope) {
let dtor_scope = CodeExtent::DestructionScope(blk.id);
record_superlifetime(visitor, dtor_scope, blk.span);
visitor.region_maps.record_encl_scope(blk_scope, dtor_scope);
} else {
record_superlifetime(visitor, blk_scope, blk.span);
}
// We treat the tail expression in the block (if any) somewhat
// differently from the statements. The issue has to do with
@ -675,7 +762,9 @@ fn resolve_stmt(visitor: &mut RegionResolutionVisitor, stmt: &ast::Stmt) {
// statement plus its destructors, and thus the extent for which
// regions referenced by the destructors need to survive.
visitor.region_maps.mark_as_terminating_scope(stmt_scope);
record_superlifetime(visitor, stmt_scope, stmt.span);
let dtor_scope = CodeExtent::DestructionScope(stmt_id);
visitor.region_maps.record_encl_scope(stmt_scope, dtor_scope);
record_superlifetime(visitor, dtor_scope, stmt.span);
let prev_parent = visitor.cx.parent;
visitor.cx.parent = InnermostEnclosingExpr::Some(stmt_id);
@ -687,15 +776,30 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) {
debug!("resolve_expr(expr.id={:?})", expr.id);
let expr_scope = CodeExtent::Misc(expr.id);
record_superlifetime(visitor, expr_scope, expr.span);
// If expr was previously marked as a terminating scope during the
// recursive visit of its parent node in the AST, then we need to
// account for the destruction scope representing the extent of
// the destructors that run immediately after the expression
// itself completes.
if visitor.region_maps.terminating_scopes.borrow().contains(&expr_scope) {
let dtor_scope = CodeExtent::DestructionScope(expr.id);
record_superlifetime(visitor, dtor_scope, expr.span);
visitor.region_maps.record_encl_scope(expr_scope, dtor_scope);
} else {
record_superlifetime(visitor, expr_scope, expr.span);
}
let prev_cx = visitor.cx;
visitor.cx.parent = InnermostEnclosingExpr::Some(expr.id);
{
let region_maps = &mut visitor.region_maps;
let terminating = |id| {
let scope = CodeExtent::from_node_id(id);
let terminating = |e: &P<ast::Expr>| {
let scope = CodeExtent::from_node_id(e.id);
region_maps.mark_as_terminating_scope(scope)
};
let terminating_block = |b: &P<ast::Block>| {
let scope = CodeExtent::from_node_id(b.id);
region_maps.mark_as_terminating_scope(scope)
};
match expr.node {
@ -707,26 +811,26 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) {
ast::ExprBinary(codemap::Spanned { node: ast::BiOr, .. }, _, ref r) => {
// For shortcircuiting operators, mark the RHS as a terminating
// scope since it only executes conditionally.
terminating(r.id);
terminating(r);
}
ast::ExprIf(_, ref then, Some(ref otherwise)) => {
terminating(then.id);
terminating(otherwise.id);
terminating_block(then);
terminating(otherwise);
}
ast::ExprIf(ref expr, ref then, None) => {
terminating(expr.id);
terminating(then.id);
terminating(expr);
terminating_block(then);
}
ast::ExprLoop(ref body, _) => {
terminating(body.id);
terminating_block(body);
}
ast::ExprWhile(ref expr, ref body, _) => {
terminating(expr.id);
terminating(body.id);
terminating(expr);
terminating_block(body);
}
ast::ExprMatch(..) => {
@ -1021,6 +1125,9 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor,
let body_scope = CodeExtent::from_node_id(body.id);
visitor.region_maps.mark_as_terminating_scope(body_scope);
let dtor_scope = CodeExtent::DestructionScope(body.id);
visitor.region_maps.record_encl_scope(body_scope, dtor_scope);
record_superlifetime(visitor, dtor_scope, body.span);
let outer_cx = visitor.cx;

View File

@ -41,7 +41,7 @@ pub enum DefRegion {
/* lifetime decl */ ast::NodeId),
DefLateBoundRegion(ty::DebruijnIndex,
/* lifetime decl */ ast::NodeId),
DefFreeRegion(/* block scope */ region::CodeExtent,
DefFreeRegion(/* block scope */ region::DestructionScopeData,
/* lifetime decl */ ast::NodeId),
}
@ -81,7 +81,7 @@ enum ScopeChain<'a> {
LateScope(&'a Vec<ast::LifetimeDef>, Scope<'a>),
/// lifetimes introduced by items within a code block are scoped
/// to that block.
BlockScope(region::CodeExtent, Scope<'a>),
BlockScope(region::DestructionScopeData, Scope<'a>),
RootScope
}
@ -191,7 +191,8 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> {
}
fn visit_block(&mut self, b: &ast::Block) {
self.with(BlockScope(region::CodeExtent::from_node_id(b.id), self.scope),
self.with(BlockScope(region::DestructionScopeData::new(b.id),
self.scope),
|_, this| visit::walk_block(this, b));
}
@ -393,9 +394,13 @@ impl<'a> LifetimeContext<'a> {
}
fn resolve_free_lifetime_ref(&mut self,
scope_data: region::CodeExtent,
scope_data: region::DestructionScopeData,
lifetime_ref: &ast::Lifetime,
scope: Scope) {
debug!("resolve_free_lifetime_ref \
scope_data: {:?} lifetime_ref: {:?} scope: {:?}",
scope_data, lifetime_ref, scope);
// Walk up the scope chain, tracking the outermost free scope,
// until we encounter a scope that contains the named lifetime
// or we run out of scopes.
@ -403,6 +408,9 @@ impl<'a> LifetimeContext<'a> {
let mut scope = scope;
let mut search_result = None;
loop {
debug!("resolve_free_lifetime_ref \
scope_data: {:?} scope: {:?} search_result: {:?}",
scope_data, scope, search_result);
match *scope {
BlockScope(blk_scope_data, s) => {
scope_data = blk_scope_data;

View File

@ -72,6 +72,8 @@ use std::cell::{Cell, RefCell};
use std::cmp;
use std::fmt;
use std::hash::{Hash, Writer, SipHasher, Hasher};
#[cfg(stage0)]
use std::marker;
use std::mem;
use std::ops;
use std::rc::Rc;
@ -931,6 +933,26 @@ pub struct TyS<'tcx> {
// the maximal depth of any bound regions appearing in this type.
region_depth: u32,
// force the lifetime to be invariant to work-around
// region-inference issues with a covariant lifetime.
#[cfg(stage0)]
marker: ShowInvariantLifetime<'tcx>,
}
// stage0-only wrapper: makes `InvariantLifetime` usable inside the
// `#[derive(Debug)]`-style formatting of `TyS` by giving it a `Debug`
// impl (the raw marker type has none in the stage0 snapshot compiler).
#[cfg(stage0)]
struct ShowInvariantLifetime<'a>(marker::InvariantLifetime<'a>);
#[cfg(stage0)]
impl<'a> ShowInvariantLifetime<'a> {
// Constructs the zero-sized marker wrapper.
fn new() -> ShowInvariantLifetime<'a> {
ShowInvariantLifetime(marker::InvariantLifetime)
}
}
#[cfg(stage0)]
impl<'a> fmt::Debug for ShowInvariantLifetime<'a> {
// Prints a fixed label; the marker carries no runtime data.
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "InvariantLifetime")
}
}
impl fmt::Debug for TypeFlags {
@ -939,9 +961,18 @@ impl fmt::Debug for TypeFlags {
}
}
// stage0-only: interned `TyS` values are compared by address (pointer
// identity), since interning guarantees one allocation per distinct type.
#[cfg(stage0)]
impl<'tcx> PartialEq for TyS<'tcx> {
fn eq<'a,'b>(&'a self, other: &'b TyS<'tcx>) -> bool {
// SAFETY(review): the transmute only re-labels the reference's
// lifetime from 'b to 'a; the referent is untouched and is used
// solely for the address comparison on the next line.
let other: &'a TyS<'tcx> = unsafe { mem::transmute(other) };
(self as *const _) == (other as *const _)
}
}
#[cfg(not(stage0))]
impl<'tcx> PartialEq for TyS<'tcx> {
fn eq(&self, other: &TyS<'tcx>) -> bool {
(self as *const _) == (other as *const _)
// (self as *const _) == (other as *const _)
(self as *const TyS<'tcx>) == (other as *const TyS<'tcx>)
}
}
impl<'tcx> Eq for TyS<'tcx> {}
@ -1174,7 +1205,9 @@ pub enum Region {
/// region parameters.
ReFree(FreeRegion),
/// A concrete region naming some expression within the current function.
/// A concrete region naming some statically determined extent
/// (e.g. an expression or sequence of statements) within the
/// current function.
ReScope(region::CodeExtent),
/// Static data that has an "infinite" lifetime. Top in the region lattice.
@ -1296,7 +1329,7 @@ impl Region {
/// A "free" region `fr` can be interpreted as "some region
/// at least as big as the scope `fr.scope`".
pub struct FreeRegion {
pub scope: region::CodeExtent,
pub scope: region::DestructionScopeData,
pub bound_region: BoundRegion
}
@ -2473,11 +2506,17 @@ fn intern_ty<'tcx>(type_arena: &'tcx TypedArena<TyS<'tcx>>,
let flags = FlagComputation::for_sty(&st);
let ty = type_arena.alloc(TyS {
sty: st,
flags: flags.flags,
region_depth: flags.depth,
});
let ty = match () {
#[cfg(stage0)]
() => type_arena.alloc(TyS { sty: st,
flags: flags.flags,
region_depth: flags.depth,
marker: ShowInvariantLifetime::new(), }),
#[cfg(not(stage0))]
() => type_arena.alloc(TyS { sty: st,
flags: flags.flags,
region_depth: flags.depth, }),
};
debug!("Interned type: {:?} Pointer: {:?}",
ty, ty as *const _);
@ -4192,12 +4231,16 @@ pub fn ty_region(tcx: &ctxt,
}
}
pub fn free_region_from_def(free_id: ast::NodeId, def: &RegionParameterDef)
pub fn free_region_from_def(outlives_extent: region::DestructionScopeData,
def: &RegionParameterDef)
-> ty::Region
{
ty::ReFree(ty::FreeRegion { scope: region::CodeExtent::from_node_id(free_id),
bound_region: ty::BrNamed(def.def_id,
def.name) })
let ret =
ty::ReFree(ty::FreeRegion { scope: outlives_extent,
bound_region: ty::BrNamed(def.def_id,
def.name) });
debug!("free_region_from_def returns {:?}", ret);
ret
}
// Returns the type of a pattern as a monotype. Like @expr_ty, this function
@ -6252,9 +6295,11 @@ pub fn construct_free_substs<'a,'tcx>(
let mut types = VecPerParamSpace::empty();
push_types_from_defs(tcx, &mut types, generics.types.as_slice());
let free_id_outlive = region::DestructionScopeData::new(free_id);
// map bound 'a => free 'a
let mut regions = VecPerParamSpace::empty();
push_region_params(&mut regions, free_id, generics.regions.as_slice());
push_region_params(&mut regions, free_id_outlive, generics.regions.as_slice());
return Substs {
types: types,
@ -6262,11 +6307,11 @@ pub fn construct_free_substs<'a,'tcx>(
};
fn push_region_params(regions: &mut VecPerParamSpace<ty::Region>,
free_id: ast::NodeId,
all_outlive_extent: region::DestructionScopeData,
region_params: &[RegionParameterDef])
{
for r in region_params {
regions.push(r.space, ty::free_region_from_def(free_id, r));
regions.push(r.space, ty::free_region_from_def(all_outlive_extent, r));
}
}
@ -6295,14 +6340,14 @@ pub fn construct_parameter_environment<'a,'tcx>(
//
let free_substs = construct_free_substs(tcx, generics, free_id);
let free_id_scope = region::CodeExtent::from_node_id(free_id);
let free_id_outlive = region::DestructionScopeData::new(free_id);
//
// Compute the bounds on Self and the type parameters.
//
let bounds = generics.to_bounds(tcx, &free_substs);
let bounds = liberate_late_bound_regions(tcx, free_id_scope, &ty::Binder(bounds));
let bounds = liberate_late_bound_regions(tcx, free_id_outlive, &ty::Binder(bounds));
let predicates = bounds.predicates.into_vec();
//
@ -6335,7 +6380,7 @@ pub fn construct_parameter_environment<'a,'tcx>(
let unnormalized_env = ty::ParameterEnvironment {
tcx: tcx,
free_substs: free_substs,
implicit_region_bound: ty::ReScope(free_id_scope),
implicit_region_bound: ty::ReScope(free_id_outlive.to_code_extent()),
caller_bounds: predicates,
selection_cache: traits::SelectionCache::new(),
};
@ -6603,14 +6648,14 @@ impl<'tcx> AutoDerefRef<'tcx> {
/// `scope_id`.
pub fn liberate_late_bound_regions<'tcx, T>(
tcx: &ty::ctxt<'tcx>,
scope: region::CodeExtent,
all_outlive_scope: region::DestructionScopeData,
value: &Binder<T>)
-> T
where T : TypeFoldable<'tcx> + Repr<'tcx>
{
replace_late_bound_regions(
tcx, value,
|br| ty::ReFree(ty::FreeRegion{scope: scope, bound_region: br})).0
|br| ty::ReFree(ty::FreeRegion{scope: all_outlive_scope, bound_region: br})).0
}
pub fn count_late_bound_regions<'tcx, T>(

View File

@ -113,6 +113,10 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
};
let scope_decorated_tag = match scope {
region::CodeExtent::Misc(_) => tag,
region::CodeExtent::DestructionScope(_) => {
new_string = format!("destruction scope surrounding {}", tag);
new_string.as_slice()
}
region::CodeExtent::Remainder(r) => {
new_string = format!("block suffix following statement {}",
r.first_statement_index);
@ -135,7 +139,7 @@ pub fn explain_region_and_span(cx: &ctxt, region: ty::Region)
}
};
match cx.map.find(fr.scope.node_id()) {
match cx.map.find(fr.scope.node_id) {
Some(ast_map::NodeBlock(ref blk)) => {
let (msg, opt_span) = explain_span(cx, "block", blk.span);
(format!("{} {}", prefix, msg), opt_span)
@ -921,7 +925,7 @@ impl<'tcx> UserString<'tcx> for ty::Region {
impl<'tcx> Repr<'tcx> for ty::FreeRegion {
fn repr(&self, tcx: &ctxt) -> String {
format!("ReFree({}, {})",
self.scope.node_id(),
self.scope.repr(tcx),
self.bound_region.repr(tcx))
}
}
@ -931,12 +935,23 @@ impl<'tcx> Repr<'tcx> for region::CodeExtent {
match *self {
region::CodeExtent::Misc(node_id) =>
format!("Misc({})", node_id),
region::CodeExtent::DestructionScope(node_id) =>
format!("DestructionScope({})", node_id),
region::CodeExtent::Remainder(rem) =>
format!("Remainder({}, {})", rem.block, rem.first_statement_index),
}
}
}
impl<'tcx> Repr<'tcx> for region::DestructionScopeData {
fn repr(&self, _tcx: &ctxt) -> String {
match *self {
region::DestructionScopeData{ node_id } =>
format!("DestructionScopeData {{ node_id: {} }}", node_id),
}
}
}
impl<'tcx> Repr<'tcx> for ast::DefId {
fn repr(&self, tcx: &ctxt) -> String {
// Unfortunately, there seems to be no way to attempt to print

View File

@ -286,7 +286,7 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> {
let loan_scope = match loan_region {
ty::ReScope(scope) => scope,
ty::ReFree(ref fr) => fr.scope,
ty::ReFree(ref fr) => fr.scope.to_code_extent(),
ty::ReStatic => {
// If we get here, an error must have been

View File

@ -15,7 +15,7 @@ use diagnostic::Emitter;
use driver;
use rustc_resolve as resolve;
use rustc_typeck::middle::lang_items;
use rustc_typeck::middle::region::{self, CodeExtent};
use rustc_typeck::middle::region::{self, CodeExtent, DestructionScopeData};
use rustc_typeck::middle::resolve_lifetime;
use rustc_typeck::middle::stability;
use rustc_typeck::middle::subst;
@ -325,7 +325,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
}
pub fn re_free(&self, nid: ast::NodeId, id: u32) -> ty::Region {
ty::ReFree(ty::FreeRegion { scope: CodeExtent::from_node_id(nid),
ty::ReFree(ty::FreeRegion { scope: DestructionScopeData::new(nid),
bound_region: ty::BrAnon(id)})
}

View File

@ -2196,7 +2196,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
// Search for external modules.
if namespace == TypeNS {
if let Some(module) = module_.external_module_children.borrow().get(&name).cloned() {
// FIXME (21114): In principle unclear `child` *has* to be lifted.
let child = module_.external_module_children.borrow().get(&name).cloned();
if let Some(module) = child {
let name_bindings =
Rc::new(Resolver::create_name_bindings_from_module(module));
debug!("lower name bindings succeeded");
@ -2481,7 +2483,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
// Finally, search through external children.
if namespace == TypeNS {
if let Some(module) = module_.external_module_children.borrow().get(&name).cloned() {
// FIXME (21114): In principle unclear `child` *has* to be lifted.
let child = module_.external_module_children.borrow().get(&name).cloned();
if let Some(module) = child {
let name_bindings =
Rc::new(Resolver::create_name_bindings_from_module(module));
return Success((Target::new(module_,

View File

@ -130,12 +130,17 @@ impl<'blk, 'tcx> CleanupMethods<'blk, 'tcx> for FunctionContext<'blk, 'tcx> {
// this new AST scope had better be its immediate child.
let top_scope = self.top_ast_scope();
if top_scope.is_some() {
assert_eq!(self.ccx
.tcx()
.region_maps
.opt_encl_scope(region::CodeExtent::from_node_id(debug_loc.id))
.map(|s|s.node_id()),
top_scope);
assert!((self.ccx
.tcx()
.region_maps
.opt_encl_scope(region::CodeExtent::from_node_id(debug_loc.id))
.map(|s|s.node_id()) == top_scope)
||
(self.ccx
.tcx()
.region_maps
.opt_encl_scope(region::CodeExtent::DestructionScope(debug_loc.id))
.map(|s|s.node_id()) == top_scope));
}
self.push_scope(CleanupScope::new(AstScopeKind(debug_loc.id),

View File

@ -13,7 +13,7 @@
use super::{check_fn, Expectation, FnCtxt};
use astconv;
use middle::region::CodeExtent;
use middle::region;
use middle::subst;
use middle::ty::{self, ToPolyTraitRef, Ty};
use rscope::RegionScope;
@ -78,7 +78,9 @@ fn check_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>,
fcx.write_ty(expr.id, closure_type);
let fn_sig =
ty::liberate_late_bound_regions(fcx.tcx(), CodeExtent::from_node_id(body.id), &fn_ty.sig);
ty::liberate_late_bound_regions(fcx.tcx(),
region::DestructionScopeData::new(body.id),
&fn_ty.sig);
check_fn(fcx.ccx,
ast::Unsafety::Normal,

View File

@ -0,0 +1,292 @@
// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use check::regionck::{self, Rcx};
use middle::infer;
use middle::region;
use middle::subst;
use middle::ty::{self, Ty};
use util::ppaux::{Repr};
use syntax::codemap::Span;
pub fn check_safety_of_destructor_if_necessary<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
typ: ty::Ty<'tcx>,
span: Span,
scope: region::CodeExtent) {
debug!("check_safety_of_destructor_if_necessary typ: {} scope: {:?}",
typ.repr(rcx.tcx()), scope);
// types that have been traversed so far by `traverse_type_if_unseen`
let mut breadcrumbs: Vec<Ty<'tcx>> = Vec::new();
iterate_over_potentially_unsafe_regions_in_type(
rcx,
&mut breadcrumbs,
typ,
span,
scope,
0);
}
fn iterate_over_potentially_unsafe_regions_in_type<'a, 'tcx>(
rcx: &mut Rcx<'a, 'tcx>,
breadcrumbs: &mut Vec<Ty<'tcx>>,
ty_root: ty::Ty<'tcx>,
span: Span,
scope: region::CodeExtent,
depth: uint)
{
let origin = |&:| infer::SubregionOrigin::SafeDestructor(span);
let mut walker = ty_root.walk();
let opt_phantom_data_def_id = rcx.tcx().lang_items.phantom_data();
let destructor_for_type = rcx.tcx().destructor_for_type.borrow();
while let Some(typ) = walker.next() {
// Avoid recursing forever.
if breadcrumbs.contains(&typ) {
continue;
}
breadcrumbs.push(typ);
// If we encounter `PhantomData<T>`, then we should replace it
// with `T`, the type it represents as owned by the
// surrounding context, before doing further analysis.
let typ = if let ty::ty_struct(struct_did, substs) = typ.sty {
if opt_phantom_data_def_id == Some(struct_did) {
let item_type = ty::lookup_item_type(rcx.tcx(), struct_did);
let tp_def = item_type.generics.types
.opt_get(subst::TypeSpace, 0).unwrap();
let new_typ = substs.type_for_def(tp_def);
debug!("replacing phantom {} with {}",
typ.repr(rcx.tcx()), new_typ.repr(rcx.tcx()));
new_typ
} else {
typ
}
} else {
typ
};
let opt_type_did = match typ.sty {
ty::ty_struct(struct_did, _) => Some(struct_did),
ty::ty_enum(enum_did, _) => Some(enum_did),
_ => None,
};
let opt_dtor =
opt_type_did.and_then(|did| destructor_for_type.get(&did));
debug!("iterate_over_potentially_unsafe_regions_in_type \
{}typ: {} scope: {:?} opt_dtor: {:?}",
(0..depth).map(|_| ' ').collect::<String>(),
typ.repr(rcx.tcx()), scope, opt_dtor);
// If `typ` has a destructor, then we must ensure that all
// borrowed data reachable via `typ` must outlive the parent
// of `scope`. This is handled below.
//
// However, there is an important special case: by
// parametricity, any generic type parameters have *no* trait
// bounds in the Drop impl can not be used in any way (apart
// from being dropped), and thus we can treat data borrowed
// via such type parameters remains unreachable.
//
// For example, consider `impl<T> Drop for Vec<T> { ... }`,
// which does have to be able to drop instances of `T`, but
// otherwise cannot read data from `T`.
//
// Of course, for the type expression passed in for any such
// unbounded type parameter `T`, we must resume the recursive
// analysis on `T` (since it would be ignored by
// type_must_outlive).
//
// FIXME (pnkfelix): Long term, we could be smart and actually
// feed which generic parameters can be ignored *into* `fn
// type_must_outlive` (or some generalization thereof). But
// for the short term, it probably covers most cases of
// interest to just special case Drop impls where: (1.) there
// are no generic lifetime parameters and (2.) *all* generic
// type parameters are unbounded. If both conditions hold, we
// simply skip the `type_must_outlive` call entirely (but
// resume the recursive checking of the type-substructure).
let has_dtor_of_interest;
if let Some(&dtor_method_did) = opt_dtor {
let impl_did = ty::impl_of_method(rcx.tcx(), dtor_method_did)
.unwrap_or_else(|| {
rcx.tcx().sess.span_bug(
span, "no Drop impl found for drop method")
});
let dtor_typescheme = ty::lookup_item_type(rcx.tcx(), impl_did);
let dtor_generics = dtor_typescheme.generics;
let has_pred_of_interest = dtor_generics.predicates.iter().any(|pred| {
// In `impl<T> Drop where ...`, we automatically
// assume some predicate will be meaningful and thus
// represents a type through which we could reach
// borrowed data. However, there can be implicit
// predicates (namely for Sized), and so we still need
// to walk through and filter out those cases.
let result = match *pred {
ty::Predicate::Trait(ty::Binder(ref t_pred)) => {
let def_id = t_pred.trait_ref.def_id;
match rcx.tcx().lang_items.to_builtin_kind(def_id) {
Some(ty::BoundSend) |
Some(ty::BoundSized) |
Some(ty::BoundCopy) |
Some(ty::BoundSync) => false,
_ => true,
}
}
ty::Predicate::Equate(..) |
ty::Predicate::RegionOutlives(..) |
ty::Predicate::TypeOutlives(..) |
ty::Predicate::Projection(..) => {
// we assume all of these where-clauses may
// give the drop implementation the capabilty
// to access borrowed data.
true
}
};
if result {
debug!("typ: {} has interesting dtor due to generic preds, e.g. {}",
typ.repr(rcx.tcx()), pred.repr(rcx.tcx()));
}
result
});
// In `impl<'a> Drop ...`, we automatically assume
// `'a` is meaningful and thus represents a bound
// through which we could reach borrowed data.
//
// FIXME (pnkfelix): In the future it would be good to
// extend the language to allow the user to express,
// in the impl signature, that a lifetime is not
// actually used (something like `where 'a: ?Live`).
let has_region_param_of_interest =
dtor_generics.has_region_params(subst::TypeSpace);
has_dtor_of_interest =
has_region_param_of_interest ||
has_pred_of_interest;
if has_dtor_of_interest {
debug!("typ: {} has interesting dtor, due to \
region params: {} or pred: {}",
typ.repr(rcx.tcx()),
has_region_param_of_interest,
has_pred_of_interest);
} else {
debug!("typ: {} has dtor, but it is uninteresting",
typ.repr(rcx.tcx()));
}
} else {
debug!("typ: {} has no dtor, and thus is uninteresting",
typ.repr(rcx.tcx()));
has_dtor_of_interest = false;
}
if has_dtor_of_interest {
// If `typ` has a destructor, then we must ensure that all
// borrowed data reachable via `typ` must outlive the
// parent of `scope`. (It does not suffice for it to
// outlive `scope` because that could imply that the
// borrowed data is torn down in between the end of
// `scope` and when the destructor itself actually runs.)
let parent_region =
match rcx.tcx().region_maps.opt_encl_scope(scope) {
Some(parent_scope) => ty::ReScope(parent_scope),
None => rcx.tcx().sess.span_bug(
span, format!("no enclosing scope found for scope: {:?}",
scope).as_slice()),
};
regionck::type_must_outlive(rcx, origin(), typ, parent_region);
} else {
// Okay, `typ` itself is itself not reachable by a
// destructor; but it may contain substructure that has a
// destructor.
match typ.sty {
ty::ty_struct(struct_did, substs) => {
// Don't recurse; we extract type's substructure,
// so do not process subparts of type expression.
walker.skip_current_subtree();
let fields =
ty::lookup_struct_fields(rcx.tcx(), struct_did);
for field in fields.iter() {
let field_type =
ty::lookup_field_type(rcx.tcx(),
struct_did,
field.id,
substs);
iterate_over_potentially_unsafe_regions_in_type(
rcx,
breadcrumbs,
field_type,
span,
scope,
depth+1)
}
}
ty::ty_enum(enum_did, substs) => {
// Don't recurse; we extract type's substructure,
// so do not process subparts of type expression.
walker.skip_current_subtree();
let all_variant_info =
ty::substd_enum_variants(rcx.tcx(),
enum_did,
substs);
for variant_info in all_variant_info.iter() {
for argument_type in variant_info.args.iter() {
iterate_over_potentially_unsafe_regions_in_type(
rcx,
breadcrumbs,
*argument_type,
span,
scope,
depth+1)
}
}
}
ty::ty_rptr(..) | ty::ty_ptr(_) | ty::ty_bare_fn(..) => {
// Don't recurse, since references, pointers,
// boxes, and bare functions don't own instances
// of the types appearing within them.
walker.skip_current_subtree();
}
_ => {}
};
// You might be tempted to pop breadcrumbs here after
// processing type's internals above, but then you hit
// exponential time blowup e.g. on
// compile-fail/huge-struct.rs. Instead, we do not remove
// anything from the breadcrumbs vector during any particular
// traversal, and instead clear it after the whole traversal
// is done.
}
}
}

View File

@ -525,7 +525,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> {
trait_def_id);
let trait_impls = self.tcx().trait_impls.borrow();
let impl_def_ids = match trait_impls.get(&trait_def_id) {
let impl_def_ids = trait_impls.get(&trait_def_id);
let impl_def_ids = match impl_def_ids {
None => { return; }
Some(impls) => impls,
};

View File

@ -90,7 +90,7 @@ use middle::infer;
use middle::mem_categorization as mc;
use middle::mem_categorization::McResult;
use middle::pat_util::{self, pat_id_map};
use middle::region::CodeExtent;
use middle::region::{self, CodeExtent};
use middle::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace, TypeSpace};
use middle::traits;
use middle::ty::{FnSig, VariantInfo, TypeScheme};
@ -127,6 +127,7 @@ use syntax::ptr::P;
use syntax::visit::{self, Visitor};
mod assoc;
pub mod dropck;
pub mod _match;
pub mod vtable;
pub mod writeback;
@ -495,7 +496,9 @@ fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
let fn_sig =
fn_ty.sig.subst(ccx.tcx, &inh.param_env.free_substs);
let fn_sig =
liberate_late_bound_regions(ccx.tcx, CodeExtent::from_node_id(body.id), &fn_sig);
liberate_late_bound_regions(ccx.tcx,
region::DestructionScopeData::new(body.id),
&fn_sig);
let fn_sig =
inh.normalize_associated_types_in(&inh.param_env, body.span, body.id, &fn_sig);
@ -1686,7 +1689,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let mut bounds_checker = wf::BoundsChecker::new(self,
ast_t.span,
CodeExtent::from_node_id(self.body_id),
self.body_id,
None);
bounds_checker.check_ty(t);

View File

@ -83,6 +83,7 @@
//! contents.
use astconv::AstConv;
use check::dropck;
use check::FnCtxt;
use check::regionmanip;
use check::vtable;
@ -171,6 +172,7 @@ pub struct Rcx<'a, 'tcx: 'a> {
// id of AST node being analyzed (the subject of the analysis).
subject: SubjectNode,
}
/// Returns the validity region of `def` -- that is, how long is `def` valid?
@ -198,7 +200,8 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> {
Rcx { fcx: fcx,
repeating_scope: initial_repeating_scope,
subject: subject,
region_bound_pairs: Vec::new() }
region_bound_pairs: Vec::new()
}
}
pub fn tcx(&self) -> &'a ty::ctxt<'tcx> {
@ -469,6 +472,10 @@ fn constrain_bindings_in_pat(pat: &ast::Pat, rcx: &mut Rcx) {
type_of_node_must_outlive(
rcx, infer::BindingTypeIsNotValidAtDecl(span),
id, var_region);
let var_scope = tcx.region_maps.var_scope(id);
let typ = rcx.resolve_node_type(id);
dropck::check_safety_of_destructor_if_necessary(rcx, typ, span, var_scope);
})
}
@ -517,6 +524,40 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
*/
_ => {}
}
// If necessary, constrain destructors in the unadjusted form of this
// expression.
let cmt_result = {
let mc = mc::MemCategorizationContext::new(rcx.fcx);
mc.cat_expr_unadjusted(expr)
};
match cmt_result {
Ok(head_cmt) => {
check_safety_of_rvalue_destructor_if_necessary(rcx,
head_cmt,
expr.span);
}
Err(..) => {
rcx.fcx.tcx().sess.span_note(expr.span,
"cat_expr_unadjusted Errd during dtor check");
}
}
}
// If necessary, constrain destructors in this expression. This will be
// the adjusted form if there is an adjustment.
let cmt_result = {
let mc = mc::MemCategorizationContext::new(rcx.fcx);
mc.cat_expr(expr)
};
match cmt_result {
Ok(head_cmt) => {
check_safety_of_rvalue_destructor_if_necessary(rcx, head_cmt, expr.span);
}
Err(..) => {
rcx.fcx.tcx().sess.span_note(expr.span,
"cat_expr Errd during dtor check");
}
}
match expr.node {
@ -995,6 +1036,33 @@ pub fn mk_subregion_due_to_dereference(rcx: &mut Rcx,
minimum_lifetime, maximum_lifetime)
}
fn check_safety_of_rvalue_destructor_if_necessary<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
cmt: mc::cmt<'tcx>,
span: Span) {
match cmt.cat {
mc::cat_rvalue(region) => {
match region {
ty::ReScope(rvalue_scope) => {
let typ = rcx.resolve_type(cmt.ty);
dropck::check_safety_of_destructor_if_necessary(rcx,
typ,
span,
rvalue_scope);
}
ty::ReStatic => {}
region => {
rcx.tcx()
.sess
.span_bug(span,
format!("unexpected rvalue region in rvalue \
destructor safety checking: `{}`",
region.repr(rcx.tcx())).as_slice());
}
}
}
_ => {}
}
}
/// Invoked on any index expression that occurs. Checks that if this is a slice being indexed, the
/// lifetime of the pointer includes the deref expr.
@ -1404,7 +1472,7 @@ fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
}
/// Ensures that all borrowed data reachable via `ty` outlives `region`.
fn type_must_outlive<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
pub fn type_must_outlive<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
origin: infer::SubregionOrigin<'tcx>,
ty: Ty<'tcx>,
region: ty::Region)

View File

@ -145,9 +145,11 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
let variants = lookup_fields(fcx);
let mut bounds_checker = BoundsChecker::new(fcx,
item.span,
region::CodeExtent::from_node_id(item.id),
item.id,
Some(&mut this.cache));
for variant in &variants {
debug!("check_type_defn at bounds_checker.scope: {:?}", bounds_checker.scope);
for variant in &variants {
for field in &variant.fields {
// Regions are checked below.
bounds_checker.check_traits_in_ty(field.ty);
@ -180,8 +182,9 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
self.with_fcx(item, |this, fcx| {
let mut bounds_checker = BoundsChecker::new(fcx,
item.span,
region::CodeExtent::from_node_id(item.id),
item.id,
Some(&mut this.cache));
debug!("check_item_type at bounds_checker.scope: {:?}", bounds_checker.scope);
let type_scheme = ty::lookup_item_type(fcx.tcx(), local_def(item.id));
let item_ty = fcx.instantiate_type_scheme(item.span,
@ -196,12 +199,11 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
item: &ast::Item)
{
self.with_fcx(item, |this, fcx| {
let item_scope = region::CodeExtent::from_node_id(item.id);
let mut bounds_checker = BoundsChecker::new(fcx,
item.span,
item_scope,
item.id,
Some(&mut this.cache));
debug!("check_impl at bounds_checker.scope: {:?}", bounds_checker.scope);
// Find the impl self type as seen from the "inside" --
// that is, with all type parameters converted from bound
@ -383,7 +385,12 @@ impl<'ccx, 'tcx, 'v> Visitor<'v> for CheckTypeWellFormedVisitor<'ccx, 'tcx> {
pub struct BoundsChecker<'cx,'tcx:'cx> {
fcx: &'cx FnCtxt<'cx,'tcx>,
span: Span,
scope: region::CodeExtent,
// This field is often attached to item impls; it is not clear
// that `CodeExtent` is well-defined for such nodes, so pnkfelix
// has left it as a NodeId rather than porting to CodeExtent.
scope: ast::NodeId,
binding_count: uint,
cache: Option<&'cx mut HashSet<Ty<'tcx>>>,
}
@ -391,7 +398,7 @@ pub struct BoundsChecker<'cx,'tcx:'cx> {
impl<'cx,'tcx> BoundsChecker<'cx,'tcx> {
pub fn new(fcx: &'cx FnCtxt<'cx,'tcx>,
span: Span,
scope: region::CodeExtent,
scope: ast::NodeId,
cache: Option<&'cx mut HashSet<Ty<'tcx>>>)
-> BoundsChecker<'cx,'tcx> {
BoundsChecker { fcx: fcx, span: span, scope: scope,
@ -446,9 +453,12 @@ impl<'cx,'tcx> TypeFolder<'tcx> for BoundsChecker<'cx,'tcx> {
where T : TypeFoldable<'tcx> + Repr<'tcx>
{
self.binding_count += 1;
let value = liberate_late_bound_regions(self.fcx.tcx(), self.scope, binder);
debug!("BoundsChecker::fold_binder: late-bound regions replaced: {}",
value.repr(self.tcx()));
let value = liberate_late_bound_regions(
self.fcx.tcx(),
region::DestructionScopeData::new(self.scope),
binder);
debug!("BoundsChecker::fold_binder: late-bound regions replaced: {} at scope: {:?}",
value.repr(self.tcx()), self.scope);
let value = value.fold_with(self);
self.binding_count -= 1;
ty::Binder(value)

View File

@ -1564,7 +1564,7 @@ fn check_method_self_type<'a, 'tcx, RS:RegionScope>(
_ => typ,
};
let body_scope = region::CodeExtent::from_node_id(body_id);
let body_scope = region::DestructionScopeData::new(body_id);
// "Required type" comes from the trait definition. It may
// contain late-bound regions from the method, but not the
@ -1608,7 +1608,7 @@ fn check_method_self_type<'a, 'tcx, RS:RegionScope>(
fn liberate_early_bound_regions<'tcx,T>(
tcx: &ty::ctxt<'tcx>,
scope: region::CodeExtent,
scope: region::DestructionScopeData,
value: &T)
-> T
where T : TypeFoldable<'tcx> + Repr<'tcx>

View File

@ -50,7 +50,8 @@
//! ```rust
//! use std::old_io as io;
//!
//! for line in io::stdin().lock().lines() {
//! let mut stdin = io::stdin();
//! for line in stdin.lock().lines() {
//! print!("{}", line.unwrap());
//! }
//! ```

View File

@ -143,7 +143,8 @@ impl StdinReader {
/// ```rust
/// use std::old_io;
///
/// for line in old_io::stdin().lock().lines() {
/// let mut stdin = old_io::stdin();
/// for line in stdin.lock().lines() {
/// println!("{}", line.unwrap());
/// }
/// ```

View File

@ -19,5 +19,6 @@ pub type header_map = HashMap<String, Rc<RefCell<Vec<Rc<String>>>>>;
// the unused ty param is necessary so this gets monomorphized
pub fn request<T>(req: &header_map) {
let _x = req["METHOD".to_string()].clone().borrow().clone()[0].clone();
let data = req["METHOD".to_string()].clone();
let _x = data.borrow().clone()[0].clone();
}

View File

@ -295,7 +295,9 @@ fn main() {
let fd = std::old_io::File::open(&Path::new("shootout-k-nucleotide.data"));
get_sequence(&mut std::old_io::BufferedReader::new(fd), ">THREE")
} else {
get_sequence(&mut *std::old_io::stdin().lock(), ">THREE")
let mut stdin = std::old_io::stdin();
let mut stdin = stdin.lock();
get_sequence(&mut *stdin, ">THREE")
};
let input = Arc::new(input);

View File

@ -274,7 +274,9 @@ fn main() {
let mut sudoku = if use_default {
Sudoku::from_vec(&DEFAULT_SUDOKU)
} else {
Sudoku::read(&mut *old_io::stdin().lock())
let mut stdin = old_io::stdin();
let mut stdin = stdin.lock();
Sudoku::read(&mut *stdin)
};
sudoku.solve();
sudoku.write(&mut old_io::stdout());

View File

@ -0,0 +1,21 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests the new destructor semantics.
use std::cell::RefCell;
fn main() {
let b = {
let a = Box::new(RefCell::new(4i8));
*a.borrow() + 1i8 //~ ERROR `*a` does not live long enough
};
println!("{}", b);
}

View File

@ -0,0 +1,115 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Reject mixing cyclic structure and Drop when using fixed length
// arrays.
//
// (Compare against compile-fail/dropck_vec_cycle_checked.rs)
#![feature(unsafe_destructor)]
use std::cell::Cell;
use id::Id;
mod s {
#![allow(unstable)]
use std::sync::atomic::{AtomicUint, ATOMIC_UINT_INIT, Ordering};
static S_COUNT: AtomicUint = ATOMIC_UINT_INIT;
pub fn next_count() -> usize {
S_COUNT.fetch_add(1, Ordering::SeqCst) + 1
}
}
mod id {
use s;
#[derive(Debug)]
pub struct Id {
orig_count: usize,
count: usize,
}
impl Id {
pub fn new() -> Id {
let c = s::next_count();
println!("building Id {}", c);
Id { orig_count: c, count: c }
}
pub fn count(&self) -> usize {
println!("Id::count on {} returns {}", self.orig_count, self.count);
self.count
}
}
impl Drop for Id {
fn drop(&mut self) {
println!("dropping Id {}", self.count);
self.count = 0;
}
}
}
trait HasId {
fn count(&self) -> usize;
}
#[derive(Debug)]
struct CheckId<T:HasId> {
v: T
}
#[allow(non_snake_case)]
fn CheckId<T:HasId>(t: T) -> CheckId<T> { CheckId{ v: t } }
#[unsafe_destructor]
impl<T:HasId> Drop for CheckId<T> {
fn drop(&mut self) {
assert!(self.v.count() > 0);
}
}
#[derive(Debug)]
struct B<'a> {
id: Id,
a: [CheckId<Cell<Option<&'a B<'a>>>>; 2]
}
impl<'a> HasId for Cell<Option<&'a B<'a>>> {
fn count(&self) -> usize {
match self.get() {
None => 1,
Some(b) => b.id.count(),
}
}
}
impl<'a> B<'a> {
fn new() -> B<'a> {
B { id: Id::new(), a: [CheckId(Cell::new(None)), CheckId(Cell::new(None))] }
}
}
fn f() {
let (b1, b2, b3);
b1 = B::new();
b2 = B::new();
b3 = B::new();
b1.a[0].v.set(Some(&b2)); //~ ERROR `b2` does not live long enough
b1.a[1].v.set(Some(&b3)); //~ ERROR `b3` does not live long enough
b2.a[0].v.set(Some(&b2)); //~ ERROR `b2` does not live long enough
b2.a[1].v.set(Some(&b3)); //~ ERROR `b3` does not live long enough
b3.a[0].v.set(Some(&b1)); //~ ERROR `b1` does not live long enough
b3.a[1].v.set(Some(&b2)); //~ ERROR `b2` does not live long enough
}
fn main() {
f();
}

View File

@ -0,0 +1,55 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A simple example of an unsound mixing of cyclic structure and Drop.
//
// Each `D` has a name and an optional reference to another `D`
// sibling, but also implements a drop method that prints out its own
// name as well as the name of its sibling.
//
// By setting up a cyclic structure, the drop code cannot possibly
// work. Therefore this code must be rejected.
//
// (As it turns out, essentially any attempt to install a sibling here
// will be rejected, regardless of whether it forms a cyclic
// structure or not. This is because the use of the same lifetime
// `'a` in `&'a D<'a>` cannot be satisfied when `D<'a>` implements
// `Drop`.)
#![feature(unsafe_destructor)]
use std::cell::Cell;
struct D<'a> {
name: String,
p: Cell<Option<&'a D<'a>>>,
}
impl<'a> D<'a> {
fn new(name: String) -> D<'a> { D { name: name, p: Cell::new(None) } }
}
#[unsafe_destructor]
impl<'a> Drop for D<'a> {
fn drop(&mut self) {
println!("dropping {} whose sibling is {:?}",
self.name, self.p.get().map(|d| &d.name));
}
}
fn g() {
let (d1, d2) = (D::new(format!("d1")), D::new(format!("d2")));
d1.p.set(Some(&d2)); //~ ERROR `d2` does not live long enough
d2.p.set(Some(&d1)); //~ ERROR `d1` does not live long enough
}
fn main() {
g();
}

View File

@ -0,0 +1,130 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Reject mixing cyclic structure and Drop when using TypedArena.
//
// (Compare against compile-fail/dropck_vec_cycle_checked.rs)
//
// (Also compare against compile-fail/dropck_tarena_unsound_drop.rs,
// which is a reduction of this code to more directly show the reason
// for the error message we see here.)
#![allow(unstable)]
#![feature(unsafe_destructor)]
extern crate arena;
use arena::TypedArena;
use std::cell::Cell;
use id::Id;
// Process-global counter used to hand out unique, non-zero ids.
mod s {
    #![allow(unstable)]
    use std::sync::atomic::{AtomicUint, ATOMIC_UINT_INIT, Ordering};
    static S_COUNT: AtomicUint = ATOMIC_UINT_INIT;
    // Returns the next globally unique count (always >= 1).
    pub fn next_count() -> usize {
        S_COUNT.fetch_add(1, Ordering::SeqCst) + 1
    }
}
// An identifier that zeroes its `count` when dropped, so that any
// (unsound) use-after-drop is observable as `count == 0`.
mod id {
    use s;
    #[derive(Debug)]
    pub struct Id {
        orig_count: usize,
        count: usize,
    }
    impl Id {
        // Builds an `Id` carrying a fresh globally unique count.
        pub fn new() -> Id {
            let c = s::next_count();
            println!("building Id {}", c);
            Id { orig_count: c, count: c }
        }
        // Returns `count`; non-zero iff the `Id` has not been dropped.
        pub fn count(&self) -> usize {
            println!("Id::count on {} returns {}", self.orig_count, self.count);
            self.count
        }
    }
    impl Drop for Id {
        fn drop(&mut self) {
            println!("dropping Id {}", self.count);
            self.count = 0;
        }
    }
}
trait HasId {
    fn count(&self) -> usize;
}
// Wrapper whose destructor asserts the id it can reach is still live
// (non-zero); this `Drop` impl is what dropck must account for.
#[derive(Debug)]
struct CheckId<T:HasId> {
    v: T
}
// Function-style constructor matching the struct name.
#[allow(non_snake_case)]
fn CheckId<T:HasId>(t: T) -> CheckId<T> { CheckId{ v: t } }
#[unsafe_destructor]
impl<T:HasId> Drop for CheckId<T> {
    fn drop(&mut self) {
        assert!(self.v.count() > 0);
    }
}
// A node with its own `Id` and checked links to sibling nodes of the
// same lifetime, enabling the cyclic structure built in `f` below.
#[derive(Debug)]
struct C<'a> {
    id: Id,
    v: Vec<CheckId<Cell<Option<&'a C<'a>>>>>,
}
impl<'a> HasId for Cell<Option<&'a C<'a>>> {
    // An empty cell reports 1 (trivially live); a populated cell
    // reports the referent's id count (0 if already dropped).
    fn count(&self) -> usize {
        match self.get() {
            None => 1,
            Some(c) => c.id.count(),
        }
    }
}
impl<'a> C<'a> {
    fn new() -> C<'a> {
        C { id: Id::new(), v: Vec::new() }
    }
}
// Ties the arena's own lifetime `'a` to the elements' self-references.
// Since `C<'a>` owns values with destructors, `'a` must strictly
// outlive the arena, which this signature makes unsatisfiable.
fn f<'a>(arena: &'a TypedArena<C<'a>>) {
    let c1 = arena.alloc(C::new());
    let c2 = arena.alloc(C::new());
    let c3 = arena.alloc(C::new());
    c1.v.push(CheckId(Cell::new(None)));
    c1.v.push(CheckId(Cell::new(None)));
    c2.v.push(CheckId(Cell::new(None)));
    c2.v.push(CheckId(Cell::new(None)));
    c3.v.push(CheckId(Cell::new(None)));
    c3.v.push(CheckId(Cell::new(None)));
    c1.v[0].v.set(Some(c2));
    c1.v[1].v.set(Some(c3));
    c2.v[0].v.set(Some(c2));
    c2.v[1].v.set(Some(c3));
    c3.v[0].v.set(Some(c1));
    c3.v[1].v.set(Some(c2));
}
fn main() {
    let arena = TypedArena::new();
    f(&arena); //~ ERROR `arena` does not live long enough
}

View File

@ -0,0 +1,54 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that an arena (TypedArena) cannot carry elements whose drop
// methods might access borrowed data of lifetime that does not
// strictly outlive the arena itself.
//
// Compare against run-pass/dropck_tarena_sound_drop.rs, which shows a
// similar setup, but loosens `f` so that the struct `C<'a>` can be
// fed a lifetime longer than that of the arena.
//
// (Also compare against dropck_tarena_cycle_checked.rs, from which
// this was reduced to better understand its error message.)
#![allow(unstable)]
#![feature(unsafe_destructor)]
extern crate arena;
use arena::TypedArena;
trait HasId { fn count(&self) -> usize; }
struct CheckId<T:HasId> { v: T }
// In the code below, the impl of HasId for `&'a usize` does not
// actually access the borrowed data, but the point is that the
// interface to CheckId does not (and cannot) know that, and therefore
// when encountering a value V of type CheckId<S>, we must
// conservatively force the type S to strictly outlive V.
#[unsafe_destructor]
impl<T:HasId> Drop for CheckId<T> {
    fn drop(&mut self) {
        assert!(self.v.count() > 0);
    }
}
struct C<'a> { v: CheckId<&'a usize>, }
impl<'a> HasId for &'a usize { fn count(&self) -> usize { 1 } }
// Forcing the borrowed-arena lifetime `'a` to also appear inside the
// element type `C<'a>` makes the required strict-outlives relation
// impossible to satisfy, so the call below must be rejected.
fn f<'a>(_arena: &'a TypedArena<C<'a>>) {}
fn main() {
    let arena: TypedArena<C> = TypedArena::new();
    f(&arena); //~ ERROR `arena` does not live long enough
}

View File

@ -0,0 +1,122 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Reject mixing cyclic structure and Drop when using Vec.
//
// (Compare against compile-fail/dropck_arr_cycle_checked.rs)
#![feature(unsafe_destructor)]
use std::cell::Cell;
use id::Id;
// Process-global counter used to hand out unique, non-zero ids.
mod s {
    #![allow(unstable)]
    use std::sync::atomic::{AtomicUint, ATOMIC_UINT_INIT, Ordering};
    static S_COUNT: AtomicUint = ATOMIC_UINT_INIT;
    // Returns the next globally unique count (always >= 1).
    pub fn next_count() -> usize {
        S_COUNT.fetch_add(1, Ordering::SeqCst) + 1
    }
}
// An identifier that zeroes its `count` when dropped, so that any
// (unsound) use-after-drop is observable as `count == 0`.
mod id {
    use s;
    #[derive(Debug)]
    pub struct Id {
        orig_count: usize,
        count: usize,
    }
    impl Id {
        // Builds an `Id` carrying a fresh globally unique count.
        pub fn new() -> Id {
            let c = s::next_count();
            println!("building Id {}", c);
            Id { orig_count: c, count: c }
        }
        // Returns `count`; non-zero iff the `Id` has not been dropped.
        pub fn count(&self) -> usize {
            println!("Id::count on {} returns {}", self.orig_count, self.count);
            self.count
        }
    }
    impl Drop for Id {
        fn drop(&mut self) {
            println!("dropping Id {}", self.count);
            self.count = 0;
        }
    }
}
trait HasId {
    fn count(&self) -> usize;
}
// Wrapper whose destructor asserts the id it can reach is still live
// (non-zero); this `Drop` impl is what dropck must account for.
#[derive(Debug)]
struct CheckId<T:HasId> {
    v: T
}
// Function-style constructor matching the struct name.
#[allow(non_snake_case)]
fn CheckId<T:HasId>(t: T) -> CheckId<T> { CheckId{ v: t } }
#[unsafe_destructor]
impl<T:HasId> Drop for CheckId<T> {
    fn drop(&mut self) {
        assert!(self.v.count() > 0);
    }
}
// A node with its own `Id` and checked links to sibling nodes of the
// same lifetime, enabling the cyclic structure attempted in `f`.
#[derive(Debug)]
struct C<'a> {
    id: Id,
    v: Vec<CheckId<Cell<Option<&'a C<'a>>>>>,
}
impl<'a> HasId for Cell<Option<&'a C<'a>>> {
    // An empty cell reports 1 (trivially live); a populated cell
    // reports the referent's id count (0 if already dropped).
    fn count(&self) -> usize {
        match self.get() {
            None => 1,
            Some(c) => c.id.count(),
        }
    }
}
impl<'a> C<'a> {
    fn new() -> C<'a> {
        C { id: Id::new(), v: Vec::new() }
    }
}
// Attempts to cross-link same-scope values into cycles; because
// `CheckId` has a destructor, each borrow must strictly outlive the
// value holding it, so every `set` call below must be rejected.
fn f() {
    let (mut c1, mut c2, mut c3);
    c1 = C::new();
    c2 = C::new();
    c3 = C::new();
    c1.v.push(CheckId(Cell::new(None)));
    c1.v.push(CheckId(Cell::new(None)));
    c2.v.push(CheckId(Cell::new(None)));
    c2.v.push(CheckId(Cell::new(None)));
    c3.v.push(CheckId(Cell::new(None)));
    c3.v.push(CheckId(Cell::new(None)));
    c1.v[0].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough
    c1.v[1].v.set(Some(&c3)); //~ ERROR `c3` does not live long enough
    c2.v[0].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough
    c2.v[1].v.set(Some(&c3)); //~ ERROR `c3` does not live long enough
    c3.v[0].v.set(Some(&c1)); //~ ERROR `c1` does not live long enough
    c3.v[1].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough
}
fn main() {
    f();
}

View File

@ -0,0 +1,135 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Checking that `Vec<T>` cannot hide lifetimes within `T` when `T`
// implements `Drop` and might access methods of values that have
// since been deallocated.
//
// In this case, the values in question hold (non-zero) unique-ids
// that zero themselves out when dropped, and are wrapped in another
// type with a destructor that asserts that the ids it references are
// indeed non-zero (i.e., effectively checking that the id's are not
// dropped while there are still any outstanding references).
//
// However, the values in question are also formed into a
// cyclic-structure, ensuring that there is no way for all of the
// conditions above to be satisfied, meaning that if the dropck is
// sound, it should reject this code.
#![feature(unsafe_destructor)]
use std::cell::Cell;
use id::Id;
mod s {
    #![allow(unstable)]
    use std::sync::atomic::{AtomicUint, ATOMIC_UINT_INIT, Ordering};
    static S_COUNT: AtomicUint = ATOMIC_UINT_INIT;
    /// generates globally unique count (global across the current
    /// process, that is)
    pub fn next_count() -> usize {
        S_COUNT.fetch_add(1, Ordering::SeqCst) + 1
    }
}
mod id {
    use s;
    /// Id represents a globally unique identifier (global across the
    /// current process, that is). When dropped, it automatically
    /// clears its `count` field, but leaves `orig_count` untouched,
    /// so that if there are subsequent (erroneous) invocations of its
    /// method (which is unsound), we can observe it by seeing that
    /// the `count` is 0 while the `orig_count` is non-zero.
    #[derive(Debug)]
    pub struct Id {
        orig_count: usize,
        count: usize,
    }
    impl Id {
        /// Creates an `Id` with a globally unique count.
        pub fn new() -> Id {
            let c = s::next_count();
            println!("building Id {}", c);
            Id { orig_count: c, count: c }
        }
        /// returns the `count` of self; should be non-zero if
        /// everything is working.
        pub fn count(&self) -> usize {
            println!("Id::count on {} returns {}", self.orig_count, self.count);
            self.count
        }
    }
    impl Drop for Id {
        fn drop(&mut self) {
            println!("dropping Id {}", self.count);
            self.count = 0;
        }
    }
}
trait HasId {
    fn count(&self) -> usize;
}
// Wrapper whose destructor asserts the id it can reach is still live
// (non-zero); this `Drop` impl is what dropck must account for.
#[derive(Debug)]
struct CheckId<T:HasId> {
    v: T
}
// Function-style constructor matching the struct name.
#[allow(non_snake_case)]
fn CheckId<T:HasId>(t: T) -> CheckId<T> { CheckId{ v: t } }
#[unsafe_destructor]
impl<T:HasId> Drop for CheckId<T> {
    fn drop(&mut self) {
        assert!(self.v.count() > 0);
    }
}
// A node with its own `Id` and checked links to sibling nodes of the
// same lifetime, enabling the cyclic structure attempted in `f`.
#[derive(Debug)]
struct C<'a> {
    id: Id,
    v: Vec<CheckId<Cell<Option<&'a C<'a>>>>>,
}
impl<'a> HasId for Cell<Option<&'a C<'a>>> {
    // An empty cell reports 1 (trivially live); a populated cell
    // reports the referent's id count (0 if already dropped).
    fn count(&self) -> usize {
        match self.get() {
            None => 1,
            Some(c) => c.id.count(),
        }
    }
}
impl<'a> C<'a> {
    fn new() -> C<'a> {
        C { id: Id::new(), v: Vec::new() }
    }
}
// Attempts a two-node cycle between same-scope values; since
// `CheckId` has a destructor, both `set` calls must be rejected.
fn f() {
    let (mut c1, mut c2);
    c1 = C::new();
    c2 = C::new();
    c1.v.push(CheckId(Cell::new(None)));
    c2.v.push(CheckId(Cell::new(None)));
    c1.v[0].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough
    c2.v[0].v.set(Some(&c1)); //~ ERROR `c1` does not live long enough
}
fn main() {
    f();
}

View File

@ -0,0 +1,31 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test is a simple example of code that violates the dropck
// rules: it pushes `&x` and `&y` into `v`, but the referenced data
// will be dropped before the vector itself is.
// (In principle we know that `Vec` does not reference the data it
// owns from within its drop code, apart from calling drop on each
// element it owns; thus, for data like this, it seems like we could
// loosen the restrictions here if we wanted. But it also is not
// clear whether such loosening is terribly important.)
fn main() {
    // `v` is declared before `x`/`y`, so it is dropped after them;
    // dropck therefore rejects storing `&x`/`&y` inside it.
    let mut v = Vec::new();
    let x: i8 = 3;
    let y: i8 = 4;
    v.push(&x); //~ ERROR `x` does not live long enough
    v.push(&y); //~ ERROR `y` does not live long enough
    assert_eq!(v.as_slice(), [&3, &4]);
}

View File

@ -0,0 +1,39 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
#[derive(Show)]
struct B<'a> {
a: [Cell<Option<&'a B<'a>>>; 2]
}
impl<'a> B<'a> {
fn new() -> B<'a> {
B { a: [Cell::new(None), Cell::new(None)] }
}
}
fn f() {
let (b1, b2, b3);
b1 = B::new();
b2 = B::new();
b3 = B::new();
b1.a[0].set(Some(&b2));
b1.a[1].set(Some(&b3));
b2.a[0].set(Some(&b2));
b2.a[1].set(Some(&b3));
b3.a[0].set(Some(&b1));
b3.a[1].set(Some(&b2));
}
fn main() {
f();
}

View File

@ -0,0 +1,51 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Check that a arena (TypedArena) can carry elements whose drop
// methods might access borrowed data, as long as the borrowed data
// has lifetime that strictly outlives the arena itself.
//
// Compare against compile-fail/dropck_tarena_unsound_drop.rs, which
// shows a similar setup, but restricts `f` so that the struct `C<'a>`
// is force-fed a lifetime equal to that of the borrowed arena.
#![allow(unstable)]
#![feature(unsafe_destructor)]
extern crate arena;
use arena::TypedArena;
trait HasId { fn count(&self) -> usize; }
struct CheckId<T:HasId> { v: T }
// In the code below, the impl of HasId for `&'a usize` does not
// actually access the borrowed data, but the point is that the
// interface to CheckId does not (and cannot) know that, and therefore
// when encountering the a value V of type CheckId<S>, we must
// conservatively force the type S to strictly outlive V.
#[unsafe_destructor]
impl<T:HasId> Drop for CheckId<T> {
fn drop(&mut self) {
assert!(self.v.count() > 0);
}
}
struct C<'a> { _v: CheckId<&'a usize>, }
impl<'a> HasId for &'a usize { fn count(&self) -> usize { 1 } }
fn f<'a, 'b>(_arena: &'a TypedArena<C<'b>>) {}
fn main() {
let arena: TypedArena<C> = TypedArena::new();
f(&arena);
}

View File

@ -37,7 +37,8 @@ fn parent() {
}
fn child() {
for line in old_io::stdin().lock().lines() {
let mut stdin = old_io::stdin();
for line in stdin.lock().lines() {
println!("{}", line.unwrap());
}
}

View File

@ -27,7 +27,8 @@ fn main() {
fn child() {
old_io::stdout().write_line("foo").unwrap();
old_io::stderr().write_line("bar").unwrap();
assert_eq!(old_io::stdin().lock().read_line().err().unwrap().kind, old_io::EndOfFile);
let mut stdin = old_io::stdin();
assert_eq!(stdin.lock().read_line().err().unwrap().kind, old_io::EndOfFile);
}
fn test() {

View File

@ -29,5 +29,5 @@ impl Bar {
fn main() {
let b = RefCell::new(Bar);
b.borrow().foo()
b.borrow().foo();
}

View File

@ -0,0 +1,38 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
struct C<'a> {
p: Cell<Option<&'a C<'a>>>,
}
impl<'a> C<'a> {
fn new() -> C<'a> { C { p: Cell::new(None) } }
}
fn f1() {
let (c1, c2) = (C::new(), C::new());
c1.p.set(Some(&c2));
c2.p.set(Some(&c1));
}
fn f2() {
let (c1, c2);
c1 = C::new();
c2 = C::new();
c1.p.set(Some(&c2));
c2.p.set(Some(&c1));
}
fn main() {
f1();
f2();
}

View File

@ -22,8 +22,9 @@ struct Point {
}
pub fn main() {
let box_5 = box 5u;
assert_eq!(Rc::new(5u).to_uint(), Some(5));
assert_eq!((box &box &Rc::new(box box &box 5u)).to_uint(), Some(5));
assert_eq!((box &box &Rc::new(box box &box_5)).to_uint(), Some(5));
let point = Rc::new(Point {x: 2, y: 4});
assert_eq!(point.x, 2);
assert_eq!(point.y, 4);

View File

@ -0,0 +1,53 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This is a regression test for something that only came up while
// attempting to bootstrap librustc with new destructor lifetime
// semantics.
use std::collections::HashMap;
use std::cell::RefCell;
// This version does not yet work (associated type issues)...
#[cfg(cannot_use_this_yet)]
fn foo<'a>(map: RefCell<HashMap<&'static str, &'a [u8]>>) {
let one = [1u];
assert_eq!(map.borrow().get("one"), Some(&one[]));
}
#[cfg(cannot_use_this_yet_either)]
// ... and this version does not work (the lifetime of `one` is
// supposed to match the lifetime `'a`) ...
fn foo<'a>(map: RefCell<HashMap<&'static str, &'a [u8]>>) {
let one = [1u];
assert_eq!(map.borrow().get("one"), Some(&one.as_slice()));
}
#[cfg(all(not(cannot_use_this_yet),not(cannot_use_this_yet_either)))]
fn foo<'a>(map: RefCell<HashMap<&'static str, &'a [u8]>>) {
// ...so instead we walk through the trivial slice and make sure
// it contains the element we expect.
for (i, &x) in map.borrow().get("one").unwrap().iter().enumerate() {
assert_eq!((i, x), (0, 1));
}
}
fn main() {
let zer = [0u8];
let one = [1u8];
let two = [2u8];
let mut map = HashMap::new();
map.insert("zero", &zer[]);
map.insert("one", &one[]);
map.insert("two", &two[]);
let map = RefCell::new(map);
foo(map);
}

View File

@ -0,0 +1,43 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This is a regression test for something that only came up while
// attempting to bootstrap libsyntax; it is adapted from
// `syntax::ext::tt::generic_extension`.
pub struct E<'a> {
pub f: &'a u8,
}
impl<'b> E<'b> {
pub fn m(&self) -> &'b u8 { self.f }
}
pub struct P<'c> {
pub g: &'c u8,
}
pub trait M {
fn n(&self) -> u8;
}
impl<'d> M for P<'d> {
fn n(&self) -> u8 { *self.g }
}
fn extension<'e>(x: &'e E<'e>) -> Box<M+'e> {
loop {
let p = P { g: x.m() };
return Box::new(p) as Box<M+'e>;
}
}
fn main() {
let w = E { f: &10u8 };
let o = extension(&w);
assert_eq!(o.n(), 10u8);
}

View File

@ -0,0 +1,42 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Uncovered during work on new scoping rules for safe destructors
// as an important use case to support properly.
pub struct E<'a> {
pub f: &'a u8,
}
impl<'b> E<'b> {
pub fn m(&self) -> &'b u8 { self.f }
}
pub struct P<'c> {
pub g: &'c u8,
}
pub trait M {
fn n(&self) -> u8;
}
impl<'d> M for P<'d> {
fn n(&self) -> u8 { *self.g }
}
fn extension<'e>(x: &'e E<'e>) -> Box<M+'e> {
loop {
let p = P { g: x.m() };
return Box::new(p) as Box<M+'e>;
}
}
fn main() {
let w = E { f: &10u8 };
let o = extension(&w);
assert_eq!(o.n(), 10u8);
}

View File

@ -0,0 +1,47 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
#[derive(Show)]
struct C<'a> {
v: Vec<Cell<Option<&'a C<'a>>>>,
}
impl<'a> C<'a> {
fn new() -> C<'a> {
C { v: Vec::new() }
}
}
fn f() {
let (mut c1, mut c2, mut c3);
c1 = C::new();
c2 = C::new();
c3 = C::new();
c1.v.push(Cell::new(None));
c1.v.push(Cell::new(None));
c2.v.push(Cell::new(None));
c2.v.push(Cell::new(None));
c3.v.push(Cell::new(None));
c3.v.push(Cell::new(None));
c1.v[0].set(Some(&c2));
c1.v[1].set(Some(&c3));
c2.v[0].set(Some(&c2));
c2.v[1].set(Some(&c3));
c3.v[0].set(Some(&c1));
c3.v[1].set(Some(&c2));
}
fn main() {
f();
}

View File

@ -0,0 +1,58 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cell::Cell;
#[derive(Show)]
struct Refs<'a> {
v: Vec<Cell<Option<&'a C<'a>>>>,
}
#[derive(Show)]
struct C<'a> {
refs: Refs<'a>,
}
impl<'a> Refs<'a> {
fn new() -> Refs<'a> {
Refs { v: Vec::new() }
}
}
impl<'a> C<'a> {
fn new() -> C<'a> {
C { refs: Refs::new() }
}
}
fn f() {
let (mut c1, mut c2, mut c3);
c1 = C::new();
c2 = C::new();
c3 = C::new();
c1.refs.v.push(Cell::new(None));
c1.refs.v.push(Cell::new(None));
c2.refs.v.push(Cell::new(None));
c2.refs.v.push(Cell::new(None));
c3.refs.v.push(Cell::new(None));
c3.refs.v.push(Cell::new(None));
c1.refs.v[0].set(Some(&c2));
c1.refs.v[1].set(Some(&c3));
c2.refs.v[0].set(Some(&c2));
c2.refs.v[1].set(Some(&c3));
c3.refs.v[0].set(Some(&c1));
c3.refs.v[1].set(Some(&c2));
}
fn main() {
f();
}