Rollup merge of #73237 - tmiasko:arena, r=nnethercote

Check for overflow in DroplessArena and align returned memory

* Check for overflow when calculating the slice start and end positions
  (a minimal sketch of the checked computation follows below).
* Align the pointer obtained from the allocator, ensuring that it
  satisfies the user-requested alignment (the allocator is only asked
  for a layout compatible with a u8 slice).
* Remove an incorrect assertion from DroplessArena::align.
* Avoid forming references to uninitialized memory in DroplessArena.

Helps with #73007, #72624.
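
A minimal sketch (illustrative, not the rustc code) of the
overflow-checked "align up" computation the PR introduces, assuming
`align` is a non-zero power of two as it is in the arena:

fn align_up_checked(ptr: usize, align: usize) -> Option<usize> {
    debug_assert!(align.is_power_of_two());
    // `ptr + align - 1` can overflow for pointers near usize::MAX;
    // `checked_add` reports that as `None` instead of silently wrapping.
    Some(ptr.checked_add(align - 1)? & !(align - 1))
}

A `None` here means the current chunk cannot satisfy the request and the
arena must grow.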
Dylan DPC 2020-06-16 15:08:35 +02:00 committed by GitHub
commit c65f39dac4
2 changed files with 42 additions and 32 deletions

src/libarena/lib.rs

@@ -333,13 +333,6 @@ impl Default for DroplessArena {
 }
 
 impl DroplessArena {
-    #[inline]
-    fn align(&self, align: usize) {
-        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
-        self.ptr.set(final_address as *mut u8);
-        assert!(self.ptr <= self.end);
-    }
-
     #[inline(never)]
     #[cold]
     fn grow(&self, additional: usize) {
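
Context on the removed assertion: rounding the bump pointer up to
`align` can legitimately move it past `end` when the current chunk is
nearly full, a state that should lead to growing the arena rather than
panicking (and the unchecked addition can itself overflow). A small
illustration with plain integers (hypothetical values, not arena state):

fn main() {
    let end: usize = 999; // hypothetical end of the current chunk
    let ptr: usize = 997; // only 2 bytes of the chunk remain
    let align: usize = 8;
    let aligned = (ptr + align - 1) & !(align - 1);
    // Aligning moved the pointer past the chunk end; the removed
    // assertion would have panicked on this legitimate state.
    assert!(aligned > end); // aligned == 1000
}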
@@ -370,22 +363,42 @@ impl DroplessArena {
         }
     }
 
+    /// Allocates a byte slice with specified size and alignment from the
+    /// current memory chunk. Returns `None` if there is no free space left to
+    /// satisfy the request.
     #[inline]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
-        unsafe {
-            assert!(bytes != 0);
-
-            self.align(align);
+    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<*mut u8> {
+        let ptr = self.ptr.get() as usize;
+        let end = self.end.get() as usize;
+        // The allocation request fits into the current chunk iff:
+        //
+        // let aligned = align_to(ptr, align);
+        // ptr <= aligned && aligned + bytes <= end
+        //
+        // Except that we work with fixed width integers and need to be careful
+        // about potential overflow in the calculation. If the overflow does
+        // happen, then we definitely don't have enough free space and need to
+        // grow the arena.
+        let aligned = ptr.checked_add(align - 1)? & !(align - 1);
+        let new_ptr = aligned.checked_add(bytes)?;
+        if new_ptr <= end {
+            self.ptr.set(new_ptr as *mut u8);
+            Some(aligned as *mut u8)
+        } else {
+            None
+        }
+    }
 
-            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
-            if (future_end as *mut u8) > self.end.get() {
-                self.grow(bytes);
+    #[inline]
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
+        assert!(bytes != 0);
+        loop {
+            if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
+                break a;
             }
-
-            let ptr = self.ptr.get();
-            // Set the pointer past ourselves
-            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
-            slice::from_raw_parts_mut(ptr, bytes)
+            // No free space left. Allocate a new chunk to satisfy the request.
+            // On failure the grow will panic or abort.
+            self.grow(bytes);
         }
     }
 
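
The comment in `alloc_raw_without_grow` is the heart of the fix: with
wrapping arithmetic, a bump pointer near the top of the address space
can make the capacity test pass even though the chunk is exhausted. A
self-contained demonstration (illustrative values, not arena state):

fn main() {
    let ptr: usize = usize::MAX - 2; // bump pointer close to overflow
    let align: usize = 8;

    // Unchecked align-up wraps around to a small address, so a later
    // `aligned + bytes <= end` comparison could falsely succeed.
    let wrapped = ptr.wrapping_add(align - 1) & !(align - 1);
    assert_eq!(wrapped, 0);

    // Checked align-up reports the overflow, and `alloc_raw` falls
    // through to `grow`.
    assert_eq!(ptr.checked_add(align - 1), None);
}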
@@ -393,7 +406,7 @@ impl DroplessArena {
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
 
-        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
 
         unsafe {
             // Write into uninitialized memory.
@@ -418,13 +431,11 @@ impl DroplessArena {
         assert!(mem::size_of::<T>() != 0);
         assert!(!slice.is_empty());
 
-        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
-            as *mut T;
+        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
 
         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
+            slice::from_raw_parts_mut(mem, slice.len())
         }
     }
 
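
The `alloc_slice` change implements the last bullet of the commit
message: the old code created a `&mut [T]` over uninitialized memory and
only then copied into it, which is undefined behavior. The new code
writes through the raw pointer first and forms the reference only after
the memory is initialized. The same pattern in isolation (hypothetical
helper, not part of the arena API):

use std::slice;

/// # Safety
/// `dst` must be valid for writes of `src.len()` elements of `T`.
unsafe fn init_then_borrow<'a, T: Copy>(src: &[T], dst: *mut T) -> &'a mut [T] {
    // Initialize through the raw pointer...
    dst.copy_from_nonoverlapping(src.as_ptr(), src.len());
    // ...and only then materialize a reference to the initialized data.
    slice::from_raw_parts_mut(dst, src.len())
}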
@@ -467,7 +478,7 @@ impl DroplessArena {
                     return &mut [];
                 }
                 let size = len.checked_mul(mem::size_of::<T>()).unwrap();
-                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut T;
                 unsafe { self.write_from_iter(iter, len, mem) }
             }
             (_, _) => {
@@ -482,7 +493,7 @@ impl DroplessArena {
                         let len = vec.len();
                         let start_ptr = self
                             .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
-                            as *mut _ as *mut T;
+                            as *mut T;
                         vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                         vec.set_len(0);
                         slice::from_raw_parts_mut(start_ptr, len)
@@ -526,8 +537,7 @@ pub struct DropArena {
 impl DropArena {
     #[inline]
     pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
-        let mem =
-            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
         // Write into uninitialized memory.
         ptr::write(mem, object);
         let result = &mut *mem;
@@ -550,7 +560,7 @@ impl DropArena {
         let start_ptr = self
             .arena
             .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
-            as *mut _ as *mut T;
+            as *mut T;
         let mut destructors = self.destructors.borrow_mut();
         // Reserve space for the destructors so we can't panic while adding them

src/librustc_middle/ty/list.rs

@@ -55,7 +55,7 @@ impl<T: Copy> List<T> {
             .dropless
             .alloc_raw(size, cmp::max(mem::align_of::<T>(), mem::align_of::<usize>()));
         unsafe {
-            let result = &mut *(mem.as_mut_ptr() as *mut List<T>);
+            let result = &mut *(mem as *mut List<T>);
             // Write the length
             result.len = slice.len();