Support allocating iterators with arenas

parent 9ebf47851a
commit 30e7e9c5f0
Cargo.lock
@@ -54,6 +54,7 @@ name = "arena"
 version = "0.0.0"
 dependencies = [
  "rustc_data_structures 0.0.0",
+ "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
src/libarena/Cargo.toml
@@ -11,3 +11,4 @@ crate-type = ["dylib"]
 
 [dependencies]
 rustc_data_structures = { path = "../librustc_data_structures" }
+smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
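Note on the new dependency: in smallvec 0.6 the `union` feature shrinks the inline representation to an untagged union, and `may_dangle` opts its Drop impl into the dropck eyepatch; both statements are about the smallvec crate itself, not something this commit adds. As a minimal standalone sketch of why a `SmallVec<[T; 8]>` backs the fallback path later in this diff: small collections stay inline, so collecting a short iterator costs no heap allocation.

    use smallvec::SmallVec;

    fn main() {
        let short: SmallVec<[u32; 8]> = (0..4u32).collect();
        assert!(!short.spilled()); // still inline on the stack
        let long: SmallVec<[u32; 8]> = (0..16u32).collect();
        assert!(long.spilled()); // outgrew the inline buffer, moved to the heap
    }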
src/libarena/lib.rs
@@ -23,7 +23,9 @@
 
 extern crate alloc;
 
+use rustc_data_structures::cold_path;
 use rustc_data_structures::sync::MTLock;
+use smallvec::SmallVec;
 
 use std::cell::{Cell, RefCell};
 use std::cmp;
@@ -55,6 +57,8 @@ pub struct TypedArena<T> {
 struct TypedArenaChunk<T> {
     /// The raw storage for the arena chunk.
     storage: RawVec<T>,
+    /// The number of valid entries in the chunk.
+    entries: usize,
 }
 
 impl<T> TypedArenaChunk<T> {
@@ -62,6 +66,7 @@ impl<T> TypedArenaChunk<T> {
     unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
         TypedArenaChunk {
             storage: RawVec::with_capacity(capacity),
+            entries: 0,
         }
     }
 
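The `entries` field added in the two hunks above starts at zero and counts how many slots of a chunk were actually initialized: the `grow` hunk below records it, and the `clear`/`Drop` hunks consult it so that only initialized elements are ever dropped (see the sketch after the `Drop` hunk).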
@@ -149,6 +154,27 @@ impl<T> TypedArena<T> {
         }
     }
 
+    #[inline]
+    fn can_allocate(&self, len: usize) -> bool {
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
+        available_capacity_bytes >= at_least_bytes
+    }
+
+    #[inline]
+    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
+        assert!(mem::size_of::<T>() != 0);
+        assert!(len != 0);
+
+        if !self.can_allocate(len) {
+            self.grow(len);
+        }
+
+        let start_ptr = self.ptr.get();
+        self.ptr.set(start_ptr.add(len));
+        start_ptr
+    }
+
     /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
     /// reference to it. Will panic if passed a zero-sized type.
     ///
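To make the capacity check concrete, here is a standalone sketch of the same arithmetic (`fits` is a made-up name; in the commit, `can_allocate` computes the available bytes from the arena's `ptr`/`end` cells):

    use std::mem;

    // Whether `len` elements of `T` fit in the remaining bytes; checked_mul
    // guards against overflow for absurdly large requests.
    fn fits<T>(available_capacity_bytes: usize, len: usize) -> bool {
        available_capacity_bytes >= len.checked_mul(mem::size_of::<T>()).unwrap()
    }

    fn main() {
        assert!(fits::<u64>(256, 32));  // 32 * 8 = 256 bytes: exactly fits
        assert!(!fits::<u64>(256, 33)); // 33 * 8 = 264 bytes: grow() needed
    }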
@@ -161,21 +187,63 @@ impl<T> TypedArena<T> {
     where
         T: Copy,
     {
-        assert!(mem::size_of::<T>() != 0);
-        assert!(slice.len() != 0);
-
-        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
-        let at_least_bytes = slice.len() * mem::size_of::<T>();
-        if available_capacity_bytes < at_least_bytes {
-            self.grow(slice.len());
-        }
-
         unsafe {
-            let start_ptr = self.ptr.get();
-            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
-            self.ptr.set(start_ptr.add(arena_slice.len()));
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+            let len = slice.len();
+            let start_ptr = self.alloc_raw_slice(len);
+            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+            slice::from_raw_parts_mut(start_ptr, len)
+        }
+    }
+
+    #[inline]
+    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+        assert!(mem::size_of::<T>() != 0);
+        let mut iter = iter.into_iter();
+        let size_hint = iter.size_hint();
+
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                if min == 0 {
+                    return &mut [];
+                }
+
+                if !self.can_allocate(min) {
+                    self.grow(min);
+                }
+
+                let slice = self.ptr.get();
+
+                unsafe {
+                    let mut ptr = self.ptr.get();
+                    for _ in 0..min {
+                        // Write into uninitialized memory.
+                        ptr::write(ptr, iter.next().unwrap());
+                        // Advance the pointer.
+                        ptr = ptr.offset(1);
+                        // Update the pointer on each iteration so that if
+                        // `iter.next()` panics, we drop exactly the elements
+                        // written so far.
+                        self.ptr.set(ptr);
+                    }
+                    slice::from_raw_parts_mut(slice, min)
+                }
+            }
+            _ => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the contents to the arena by copying them and
+                    // then forgetting the contents of the SmallVec.
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw_slice(len);
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        mem::forget(vec.drain());
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
         }
     }
 
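A hypothetical caller for the new method (the `arena` crate is internal to rustc, so this is a usage sketch rather than commit code): `map` over a range keeps an exact `size_hint` and takes the in-place fast path, while `filter` loses the exact upper bound and goes through the `SmallVec` fallback behind `cold_path`.

    use arena::TypedArena;

    fn demo(arena: &TypedArena<u32>) {
        // (10, Some(10)) size_hint: written straight into the chunk.
        let doubled = arena.alloc_from_iter((0..10u32).map(|i| i * 2));
        // (0, Some(10)) size_hint: collected into a SmallVec first.
        let thirds = arena.alloc_from_iter((0..10u32).filter(|i| i % 3 == 0));
        assert_eq!(doubled.len(), 10);
        assert_eq!(thirds.len(), 4); // 0, 3, 6, 9
    }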
@@ -189,6 +257,7 @@ impl<T> TypedArena<T> {
         if let Some(last_chunk) = chunks.last_mut() {
             let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
             let currently_used_cap = used_bytes / mem::size_of::<T>();
+            last_chunk.entries = currently_used_cap;
             if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                 self.end.set(last_chunk.end());
                 return;
@@ -222,8 +291,7 @@ impl<T> TypedArena<T> {
             let len = chunks_borrow.len();
             // If `T` is a ZST, the code below has no effect.
             for mut chunk in chunks_borrow.drain(..len-1) {
-                let cap = chunk.storage.cap();
-                chunk.destroy(cap);
+                chunk.destroy(chunk.entries);
             }
         }
     }
@@ -265,8 +333,7 @@ unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
                 self.clear_last_chunk(&mut last_chunk);
                 // The last chunk will be dropped. Destroy all other chunks.
                 for chunk in chunks_borrow.iter_mut() {
-                    let cap = chunk.storage.cap();
-                    chunk.destroy(cap);
+                    chunk.destroy(chunk.entries);
                 }
             }
             // RawVec handles deallocation of `last_chunk` and `self.chunks`.
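Why `cap` had to become `entries` in the two hunks above: once iterators can abandon a chunk partially filled, dropping `cap` elements would run `Drop` on uninitialized slots. A model of the contract (stand-in types, not the commit's RawVec-backed code; the real `destroy` takes the count as a parameter):

    use std::ptr;

    struct Chunk<T> {
        start: *mut T,  // start of the chunk's raw storage
        entries: usize, // slots that were actually initialized
    }

    impl<T> Chunk<T> {
        // Drop only the initialized prefix; slots past `entries` hold
        // uninitialized memory and must not be touched.
        unsafe fn destroy(&mut self) {
            for i in 0..self.entries {
                ptr::drop_in_place(self.start.add(i));
            }
        }
    }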
@@ -410,6 +477,51 @@ impl DroplessArena {
             arena_slice
         }
     }
+
+    #[inline]
+    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
+        let mut iter = iter.into_iter();
+        assert!(mem::size_of::<T>() != 0);
+        assert!(!mem::needs_drop::<T>());
+
+        let size_hint = iter.size_hint();
+
+        match size_hint {
+            (min, Some(max)) if min == max => {
+                if min == 0 {
+                    return &mut [];
+                }
+                let size = min.checked_mul(mem::size_of::<T>()).unwrap();
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                unsafe {
+                    for i in 0..min {
+                        ptr::write(mem.offset(i as isize), iter.next().unwrap());
+                    }
+                    slice::from_raw_parts_mut(mem, min)
+                }
+            }
+            (_, _) => {
+                cold_path(move || -> &mut [T] {
+                    let mut vec: SmallVec<[_; 8]> = iter.collect();
+                    if vec.is_empty() {
+                        return &mut [];
+                    }
+                    // Move the contents to the arena by copying them and
+                    // then forgetting the contents of the SmallVec.
+                    unsafe {
+                        let len = vec.len();
+                        let start_ptr = self.alloc_raw(
+                            len * mem::size_of::<T>(),
+                            mem::align_of::<T>(),
+                        ) as *mut _ as *mut T;
+                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
+                        mem::forget(vec.drain());
+                        slice::from_raw_parts_mut(start_ptr, len)
+                    }
+                })
+            }
+        }
+    }
 }
 
 #[derive(Default)]
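The DroplessArena variant leans on `assert!(!mem::needs_drop::<T>())`: since nothing in this arena is ever dropped, it never needs the `entries` bookkeeping, and leaking on a panicking iterator is harmless. The property the assertion checks, shown on concrete types (standalone, std only):

    use std::mem;

    fn main() {
        assert!(!mem::needs_drop::<(u32, f64)>()); // fine in a DroplessArena
        assert!(mem::needs_drop::<String>());      // would trip the assertion
    }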