Added a lifetime parameter to `Arena`.

The new parameter (1) is invariant, (2) must strictly outlive the arena
itself, and (3) constrains the inputs to the arena so that their borrows
must also strictly outlive the arena itself.
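For illustration, point (3) is the user-visible part of the change: a value allocated into the arena may no longer borrow from the arena itself. The sketch below mirrors the test removed further down in this diff (the wrapper fn name is only for the sketch, and `Arena` is assumed to be in scope as in those tests); after this change the nested `alloc` call is rejected because the closure's borrow of `arena` does not strictly outlive `arena`:

    fn nested_in_one_arena() {
        struct Inner { value: usize }
        struct Outer<'a> { inner: &'a Inner }

        let arena = Arena::new();
        // Rejected after this change: the inner closure borrows `arena`,
        // and that borrow would have to strictly outlive `arena` itself.
        let result = arena.alloc(|| Outer {
            inner: arena.alloc(|| Inner { value: 10 })
        });
        assert_eq!(result.inner.value, 10);
    }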

This implies that, for now, one can no longer have cross-references
between data allocated via the same `Arena` (even when the data is not
subject to the Drop Check rule). Instead, one must carry multiple
`Arena` instances or, more commonly, use one or more `TypedArena`
instances with enums encoding the different variants of allocated
data.
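A condensed sketch of the `TypedArena`-plus-enum workaround follows; the full version, with a small wrapper that projects references back out of the enum, is the replacement test added at the bottom of this diff. The fn name here is only for the sketch, and `TypedArena` is assumed to be in scope as in that test:

    fn nested_via_enum() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        // One enum covering every type allocated into this arena.
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        let arena: TypedArena<EI> = TypedArena::new();
        // Allocate the inner value, then project a shared reference out of the enum.
        let i: &EI = arena.alloc(EI::I(Inner { value: 10 }));
        let inner: &Inner = if let EI::I(ref inner) = *i { inner } else { panic!("mismatch") };
        // A later allocation into the same arena may borrow the earlier one.
        let o: &EI = arena.alloc(EI::O(Outer { inner: inner }));
        if let EI::O(ref outer) = *o { assert_eq!(outer.inner.value, 10); }
    }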
Author: Felix S. Klock II, 2015-02-02 14:10:36 +01:00
Parent: c1cda0793e
Commit: 2c9d81b2d4
1 changed file with 44 additions and 21 deletions


@@ -89,27 +89,29 @@ impl Chunk {
 /// than objects without destructors. This reduces overhead when initializing
 /// plain-old-data (`Copy` types) and means we don't need to waste time running
 /// their destructors.
-pub struct Arena {
+pub struct Arena<'longer_than_self> {
     // The head is separated out from the list as a unbenchmarked
     // microoptimization, to avoid needing to case on the list to access the
     // head.
     head: RefCell<Chunk>,
     copy_head: RefCell<Chunk>,
     chunks: RefCell<Vec<Chunk>>,
+    _invariant: marker::InvariantLifetime<'longer_than_self>,
 }
-impl Arena {
+impl<'a> Arena<'a> {
     /// Allocates a new Arena with 32 bytes preallocated.
-    pub fn new() -> Arena {
+    pub fn new() -> Arena<'a> {
         Arena::new_with_size(32)
     }
     /// Allocates a new Arena with `initial_size` bytes preallocated.
-    pub fn new_with_size(initial_size: usize) -> Arena {
+    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
         Arena {
             head: RefCell::new(chunk(initial_size, false)),
             copy_head: RefCell::new(chunk(initial_size, true)),
             chunks: RefCell::new(Vec::new()),
+            _invariant: marker::InvariantLifetime,
         }
     }
 }
@@ -123,7 +125,7 @@ fn chunk(size: usize, is_copy: bool) -> Chunk {
 }
 #[unsafe_destructor]
-impl Drop for Arena {
+impl<'longer_than_self> Drop for Arena<'longer_than_self> {
     fn drop(&mut self) {
         unsafe {
             destroy_chunk(&*self.head.borrow());
@@ -181,7 +183,7 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
     ((p & !1) as *const TyDesc, p & 1 == 1)
 }
-impl Arena {
+impl<'longer_than_self> Arena<'longer_than_self> {
     fn chunk_size(&self) -> usize {
         self.copy_head.borrow().capacity()
     }
@@ -294,7 +296,7 @@ impl Arena {
     /// Allocates a new item in the arena, using `op` to initialize the value,
     /// and returns a reference to it.
     #[inline]
-    pub fn alloc<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
+    pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
         unsafe {
             if intrinsics::needs_drop::<T>() {
                 self.alloc_noncopy(op)
@@ -318,20 +320,6 @@ fn test_arena_destructors() {
     }
 }
-#[test]
-fn test_arena_alloc_nested() {
-    struct Inner { value: usize }
-    struct Outer<'a> { inner: &'a Inner }
-    let arena = Arena::new();
-    let result = arena.alloc(|| Outer {
-        inner: arena.alloc(|| Inner { value: 10 })
-    });
-    assert_eq!(result.inner.value, 10);
-}
 #[test]
 #[should_fail]
 fn test_arena_destructors_fail() {
@@ -529,6 +517,41 @@ mod tests {
         z: i32,
     }
+    #[test]
+    fn test_arena_alloc_nested() {
+        struct Inner { value: u8 }
+        struct Outer<'a> { inner: &'a Inner }
+        enum EI<'e> { I(Inner), O(Outer<'e>) }
+        struct Wrap<'a>(TypedArena<EI<'a>>);
+        impl<'a> Wrap<'a> {
+            fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
+                let r: &EI = self.0.alloc(EI::I(f()));
+                if let &EI::I(ref i) = r {
+                    i
+                } else {
+                    panic!("mismatch");
+                }
+            }
+            fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
+                let r: &EI = self.0.alloc(EI::O(f()));
+                if let &EI::O(ref o) = r {
+                    o
+                } else {
+                    panic!("mismatch");
+                }
+            }
+        }
+        let arena = Wrap(TypedArena::new());
+        let result = arena.alloc_outer(|| Outer {
+            inner: arena.alloc_inner(|| Inner { value: 10 }) });
+        assert_eq!(result.inner.value, 10);
+    }
 #[test]
 pub fn test_copy() {
     let arena = TypedArena::new();