Auto merge of #76838 - est31:dogfood_uninit_features, r=oli-obk

Dogfood new_uninit and maybe_uninit_slice in rustc_arena

Dogfoods a few cool `MaybeUninit`-related features in the compiler's `rustc_arena` crate.
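For context, a minimal nightly-only sketch (not part of this PR; the `u32` element type and length are arbitrary) of the two unstable APIs being dogfooded: `Box::new_uninit_slice` from `new_uninit`, and the `MaybeUninit` slice helpers from `maybe_uninit_slice` that the arena chunk code switches to below.

```rust
#![feature(new_uninit, maybe_uninit_slice)]

use std::mem::MaybeUninit;

fn main() {
    // `new_uninit`: allocate a boxed slice of uninitialized slots up front,
    // like the arena chunk's new `storage: Box<[MaybeUninit<T>]>` field.
    let mut storage: Box<[MaybeUninit<u32>]> = Box::new_uninit_slice(4);
    let len = storage.len();

    // Fill the slots through a raw pointer, roughly how the arena bump-allocates.
    let start = MaybeUninit::slice_as_mut_ptr(&mut storage);
    for i in 0..len {
        unsafe { start.add(i).write(i as u32) };
    }

    // `maybe_uninit_slice`: once a prefix is initialized, assert that and view it
    // as `&mut [u32]` (the arena uses this to `drop_in_place` a chunk's entries).
    let init = unsafe { MaybeUninit::slice_assume_init_mut(&mut storage[..len]) };
    assert_eq!(init, &mut [0, 1, 2, 3]);
}
```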

Split off from #76821

r? `@oli-obk`
bors 2020-09-19 02:21:19 +00:00
commit a2c82df1f5
2 changed files with 28 additions and 25 deletions

View File

@@ -13,12 +13,11 @@
 )]
 #![feature(core_intrinsics)]
 #![feature(dropck_eyepatch)]
-#![feature(raw_vec_internals)]
+#![feature(new_uninit)]
+#![feature(maybe_uninit_slice)]
 #![cfg_attr(test, feature(test))]
 #![allow(deprecated)]
 
-extern crate alloc;
-
 use rustc_data_structures::cold_path;
 use smallvec::SmallVec;
@@ -27,12 +26,10 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::intrinsics;
 use std::marker::{PhantomData, Send};
-use std::mem;
+use std::mem::{self, MaybeUninit};
 use std::ptr;
 use std::slice;
 
-use alloc::raw_vec::RawVec;
-
 /// An arena that can hold objects of only one type.
 pub struct TypedArena<T> {
     /// A pointer to the next object to be allocated.
@@ -52,7 +49,7 @@ pub struct TypedArena<T> {
 
 struct TypedArenaChunk<T> {
     /// The raw storage for the arena chunk.
-    storage: RawVec<T>,
+    storage: Box<[MaybeUninit<T>]>,
     /// The number of valid entries in the chunk.
     entries: usize,
 }
@@ -60,7 +57,7 @@ struct TypedArenaChunk<T> {
 impl<T> TypedArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
-        TypedArenaChunk { storage: RawVec::with_capacity(capacity), entries: 0 }
+        TypedArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
     }
 
     /// Destroys this arena chunk.
@@ -69,30 +66,25 @@ impl<T> TypedArenaChunk<T> {
         // The branch on needs_drop() is an -O1 performance optimization.
         // Without the branch, dropping TypedArena<u8> takes linear time.
         if mem::needs_drop::<T>() {
-            let mut start = self.start();
-            // Destroy all allocated objects.
-            for _ in 0..len {
-                ptr::drop_in_place(start);
-                start = start.offset(1);
-            }
+            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
         }
     }
 
     // Returns a pointer to the first allocated object.
     #[inline]
-    fn start(&self) -> *mut T {
-        self.storage.ptr()
+    fn start(&mut self) -> *mut T {
+        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
     }
 
     // Returns a pointer to the end of the allocated space.
     #[inline]
-    fn end(&self) -> *mut T {
+    fn end(&mut self) -> *mut T {
         unsafe {
             if mem::size_of::<T>() == 0 {
                 // A pointer as large as possible for zero-sized elements.
                 !0 as *mut T
             } else {
-                self.start().add(self.storage.capacity())
+                self.start().add(self.storage.len())
             }
         }
     }
@@ -226,10 +218,10 @@ impl<T> TypedArena<T> {
                 let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                 last_chunk.entries = used_bytes / mem::size_of::<T>();
 
-                // If the previous chunk's capacity is less than HUGE_PAGE
+                // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.capacity();
+                new_cap = last_chunk.storage.len();
                 if new_cap < HUGE_PAGE / elem_size {
                     new_cap = new_cap.checked_mul(2).unwrap();
                 }
@@ -239,7 +231,7 @@ impl<T> TypedArena<T> {
             // Also ensure that this chunk can fit `additional`.
             new_cap = cmp::max(additional, new_cap);
 
-            let chunk = TypedArenaChunk::<T>::new(new_cap);
+            let mut chunk = TypedArenaChunk::<T>::new(new_cap);
             self.ptr.set(chunk.start());
             self.end.set(chunk.end());
             chunks.push(chunk);
@@ -301,7 +293,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
                     chunk.destroy(chunk.entries);
                 }
             }
-            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
+            // Box handles deallocation of `last_chunk` and `self.chunks`.
         }
     }
 }
@@ -344,10 +336,10 @@ impl DroplessArena {
                 // There is no need to update `last_chunk.entries` because that
                 // field isn't used by `DroplessArena`.
 
-                // If the previous chunk's capacity is less than HUGE_PAGE
+                // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.capacity();
+                new_cap = last_chunk.storage.len();
                 if new_cap < HUGE_PAGE {
                     new_cap = new_cap.checked_mul(2).unwrap();
                 }
@@ -357,7 +349,7 @@ impl DroplessArena {
             // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);
 
-            let chunk = TypedArenaChunk::<u8>::new(new_cap);
+            let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
             self.ptr.set(chunk.start());
             self.end.set(chunk.end());
             chunks.push(chunk);

View File

@@ -121,6 +121,17 @@ pub fn bench_typed_arena_clear(b: &mut Bencher) {
     })
 }
 
+#[bench]
+pub fn bench_typed_arena_clear_100(b: &mut Bencher) {
+    let mut arena = TypedArena::default();
+    b.iter(|| {
+        for _ in 0..100 {
+            arena.alloc(Point { x: 1, y: 2, z: 3 });
+        }
+        arena.clear();
+    })
+}
+
 // Drop tests
 
 struct DropCounter<'a> {