Specialize Vec::from_elem<u8> to use calloc or memset

Fixes #38723.
Matt Brubeck 2017-03-09 17:53:01 -08:00
parent 5637ed7566
commit 675475c4d3
7 changed files with 144 additions and 7 deletions
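The user-visible effect is that `vec![0u8; n]` (which goes through `Vec::from_elem`) can take its buffer directly from the allocator's zeroing path (calloc, je_calloc / MALLOCX_ZERO, or HeapAlloc with HEAP_ZERO_MEMORY) instead of writing every byte, while a non-zero byte fill collapses into a single `ptr::write_bytes` (memset). A rough illustration of the two paths the new specialization chooses between (ordinary user code, not part of the diff):

let zeros = vec![0u8; 4096]; // buffer comes back already zeroed from the allocator
let ones = vec![1u8; 4096];  // buffer is filled with one memset-style write
assert!(zeros.iter().all(|&b| b == 0));
assert!(ones.iter().all(|&b| b == 1));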


@@ -51,6 +51,11 @@ pub extern fn __rust_allocate(size: usize, _align: usize) -> *mut u8 {
unsafe { libc::malloc(size as libc::size_t) as *mut u8 }
}
#[no_mangle]
pub extern fn __rust_allocate_zeroed(size: usize, _align: usize) -> *mut u8 {
unsafe { libc::calloc(size as libc::size_t, 1) as *mut u8 }
}
#[no_mangle]
pub extern fn __rust_deallocate(ptr: *mut u8, _old_size: usize, _align: usize) {
unsafe { libc::free(ptr as *mut libc::c_void) }


@@ -23,6 +23,7 @@ use core::intrinsics::{min_align_of_val, size_of_val};
extern "C" {
#[allocator]
fn __rust_allocate(size: usize, align: usize) -> *mut u8;
fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8;
fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
fn __rust_reallocate_inplace(ptr: *mut u8,
@@ -59,6 +60,20 @@ pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
__rust_allocate(size, align)
}
/// Return a pointer to `size` bytes of memory aligned to `align` and
/// initialized to zeroes.
///
/// On failure, return a null pointer.
///
/// Behavior is undefined if the requested size is 0 or the alignment is not a
/// power of 2. The alignment must be no larger than the largest supported page
/// size on the platform.
#[inline]
pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
check_size_and_alignment(size, align);
__rust_allocate_zeroed(size, align)
}
/// Resize the allocation referenced by `ptr` to `size` bytes.
///
/// On failure, return a null pointer and leave the original allocation intact.
@@ -162,6 +177,25 @@ mod tests {
use boxed::Box;
use heap;
#[test]
fn allocate_zeroed() {
unsafe {
let size = 1024;
let ptr = heap::allocate_zeroed(size, 1);
if ptr.is_null() {
::oom()
}
let end = ptr.offset(size as isize);
let mut i = ptr;
while i < end {
assert_eq!(*i, 0);
i = i.offset(1);
}
heap::deallocate(ptr, size, 1);
}
}
#[test]
fn basic_reallocate_inplace_noop() {
unsafe {

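As a complement to the test above (which uses alignment 1), the documented contract also allows any larger power-of-two alignment up to a page. A sketch of a caller honouring that contract, assuming the unstable `alloc`/`heap_api` feature gates of this era:

#![feature(alloc, heap_api)]
extern crate alloc;
use std::mem;
use alloc::heap;

fn main() {
    unsafe {
        let size = 64 * mem::size_of::<u64>();
        let align = mem::align_of::<u64>();           // a power of two, well under a page
        let ptr = heap::allocate_zeroed(size, align); // null on failure
        assert!(!ptr.is_null());
        for i in 0..size {
            assert_eq!(*ptr.offset(i as isize), 0);   // no initialization was needed
        }
        heap::deallocate(ptr, size, align);
    }
}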

@@ -81,7 +81,18 @@ impl<T> RawVec<T> {
/// # Aborts
///
/// Aborts on OOM
#[inline]
pub fn with_capacity(cap: usize) -> Self {
RawVec::allocate(cap, false)
}
/// Like `with_capacity` but guarantees the buffer is zeroed.
#[inline]
pub fn with_capacity_zeroed(cap: usize) -> Self {
RawVec::allocate(cap, true)
}
fn allocate(cap: usize, zeroed: bool) -> Self {
unsafe {
let elem_size = mem::size_of::<T>();
@@ -93,7 +104,11 @@ impl<T> RawVec<T> {
heap::EMPTY as *mut u8
} else {
let align = mem::align_of::<T>();
let ptr = heap::allocate(alloc_size, align);
let ptr = if zeroed {
heap::allocate_zeroed(alloc_size, align)
} else {
heap::allocate(alloc_size, align)
};
if ptr.is_null() {
oom()
}

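`with_capacity_zeroed` lets a caller that wants an all-zero buffer skip initialization entirely: the allocator guarantees every byte is zero, so for a type like `u8` the buffer already counts as initialized. Condensed from the `Vec::from_elem` hunk further down (not a standalone snippet, since `RawVec` and the `Vec` fields are private to the collections code):

Vec {
    buf: RawVec::with_capacity_zeroed(n),
    len: n, // sound for u8: every byte the allocator returned is already zero
}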

@@ -38,6 +38,10 @@ mod imp {
target_os = "dragonfly", target_os = "windows", target_env = "musl"),
link_name = "je_mallocx")]
fn mallocx(size: size_t, flags: c_int) -> *mut c_void;
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
target_os = "dragonfly", target_os = "windows", target_env = "musl"),
link_name = "je_calloc")]
fn calloc(size: size_t, flags: c_int) -> *mut c_void;
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
target_os = "dragonfly", target_os = "windows", target_env = "musl"),
link_name = "je_rallocx")]
@@ -56,6 +60,8 @@ mod imp {
fn nallocx(size: size_t, flags: c_int) -> size_t;
}
const MALLOCX_ZERO: c_int = 0x40;
// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values. In practice, the alignment is a
// constant at the call site and the branch will be optimized out.
@@ -91,6 +97,16 @@ mod imp {
unsafe { mallocx(size as size_t, flags) as *mut u8 }
}
#[no_mangle]
pub extern "C" fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
unsafe { calloc(size as size_t, 1) as *mut u8 }
} else {
let flags = align_to_flags(align) | MALLOCX_ZERO;
unsafe { mallocx(size as size_t, flags) as *mut u8 }
}
}
#[no_mangle]
pub extern "C" fn __rust_reallocate(ptr: *mut u8,
_old_size: usize,
@@ -135,6 +151,11 @@ mod imp {
bogus()
}
#[no_mangle]
pub extern "C" fn __rust_allocate_zeroed(_size: usize, _align: usize) -> *mut u8 {
bogus()
}
#[no_mangle]
pub extern "C" fn __rust_reallocate(_ptr: *mut u8,
_old_size: usize,

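On the jemalloc side, zeroing is delegated to the allocator in both branches: `je_calloc` when the requested alignment is already guaranteed, and `je_mallocx` with the `MALLOCX_ZERO` flag for over-aligned requests, so neither path needs an explicit memset. As the `MIN_ALIGN` comment notes, the alignment is a constant at each monomorphized call site, so only one branch survives optimization; a hypothetical wrapper using the names from the hunks above makes that concrete:

// Hypothetical wrapper, not part of the commit: for a concrete T the branch folds away.
unsafe fn zeroed_buffer_for<T>(n: usize) -> *mut u8 {
    let size = n * core::mem::size_of::<T>();
    let align = core::mem::align_of::<T>(); // a compile-time constant once T is known
    if align <= MIN_ALIGN {
        calloc(size as size_t, 1) as *mut u8
    } else {
        mallocx(size as size_t, align_to_flags(align) | MALLOCX_ZERO) as *mut u8
    }
}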

@@ -44,6 +44,11 @@ pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
unsafe { imp::allocate(size, align) }
}
#[no_mangle]
pub extern "C" fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 {
unsafe { imp::allocate_zeroed(size, align) }
}
#[no_mangle]
pub extern "C" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
unsafe { imp::deallocate(ptr, old_size, align) }
@@ -121,6 +126,18 @@ mod imp {
}
}
pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
libc::calloc(size as libc::size_t, 1) as *mut u8
} else {
let ptr = aligned_malloc(size, align);
if !ptr.is_null() {
ptr::write_bytes(ptr, 0, size);
}
ptr
}
}
pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
@@ -173,6 +190,8 @@ mod imp {
#[repr(C)]
struct Header(*mut u8);
const HEAP_ZERO_MEMORY: DWORD = 0x00000008;
const HEAP_REALLOC_IN_PLACE_ONLY: DWORD = 0x00000010;
unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header {
@@ -185,11 +204,12 @@ mod imp {
aligned
}
pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
#[inline]
unsafe fn allocate_with_flags(size: usize, align: usize, flags: DWORD) -> *mut u8 {
if align <= MIN_ALIGN {
HeapAlloc(GetProcessHeap(), 0, size as SIZE_T) as *mut u8
HeapAlloc(GetProcessHeap(), flags, size as SIZE_T) as *mut u8
} else {
let ptr = HeapAlloc(GetProcessHeap(), 0, (size + align) as SIZE_T) as *mut u8;
let ptr = HeapAlloc(GetProcessHeap(), flags, (size + align) as SIZE_T) as *mut u8;
if ptr.is_null() {
return ptr;
}
@@ -197,6 +217,14 @@ mod imp {
}
}
pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
allocate_with_flags(size, align, 0)
}
pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
allocate_with_flags(size, align, HEAP_ZERO_MEMORY)
}
pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, size as SIZE_T) as *mut u8

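The two platforms end up with different shapes: Unix can only use `calloc` when the alignment is already satisfied (calloc takes no alignment parameter), so over-aligned requests are zeroed by hand with `ptr::write_bytes`, whereas Windows funnels both entry points through `allocate_with_flags` and lets `HeapAlloc` do the zeroing via `HEAP_ZERO_MEMORY`. A hypothetical sanity check of the over-aligned path (not part of this commit) could look like:

unsafe {
    let (size, align) = (4096, 256);            // deliberately larger than MIN_ALIGN
    let ptr = imp::allocate_zeroed(size, align);
    assert!(!ptr.is_null());
    for i in 0..size {
        assert_eq!(*ptr.offset(i as isize), 0);  // zeroed whether calloc or write_bytes was used
    }
    imp::deallocate(ptr, size, align);
}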

@@ -1370,9 +1370,38 @@ impl<T: PartialEq> Vec<T> {
#[doc(hidden)]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
let mut v = Vec::with_capacity(n);
v.extend_with_element(n, elem);
v
<T as SpecFromElem>::from_elem(elem, n)
}
// Specialization trait used for Vec::from_elem
trait SpecFromElem: Sized {
fn from_elem(elem: Self, n: usize) -> Vec<Self>;
}
impl<T: Clone> SpecFromElem for T {
default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
let mut v = Vec::with_capacity(n);
v.extend_with_element(n, elem);
v
}
}
impl SpecFromElem for u8 {
#[inline]
fn from_elem(elem: u8, n: usize) -> Vec<u8> {
if elem == 0 {
return Vec {
buf: RawVec::with_capacity_zeroed(n),
len: n,
}
}
unsafe {
let mut v = Vec::with_capacity(n);
ptr::write_bytes(v.as_mut_ptr(), elem, n);
v.set_len(n);
v
}
}
}
////////////////////////////////////////////////////////////////////////////////

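The dispatch relies on the unstable specialization feature: the blanket `impl<T: Clone>` supplies a `default fn` that falls back to the generic fill loop, and the `u8` impl overrides it. A minimal standalone sketch of the same pattern, with a hypothetical trait name, compilable only on a nightly compiler:

#![feature(specialization)]

trait FillWith: Sized {
    fn fill(n: usize, elem: Self) -> Vec<Self>;
}

impl<T: Clone> FillWith for T {
    // Generic fallback: clone the element n times.
    default fn fill(n: usize, elem: T) -> Vec<T> {
        std::iter::repeat(elem).take(n).collect()
    }
}

impl FillWith for u8 {
    // Specialized: one memset-style write over a fresh buffer
    // (the real impl additionally routes elem == 0 to a zeroed allocation).
    fn fill(n: usize, elem: u8) -> Vec<u8> {
        let mut v = Vec::with_capacity(n);
        unsafe {
            std::ptr::write_bytes(v.as_mut_ptr(), elem, n);
            v.set_len(n);
        }
        v
    }
}

fn main() {
    assert_eq!(<u8 as FillWith>::fill(3, 7), vec![7, 7, 7]);
    assert_eq!(<char as FillWith>::fill(2, 'x'), vec!['x', 'x']);
}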

@@ -27,6 +27,11 @@ pub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
}
}
#[no_mangle]
pub extern fn __rust_allocate_zeroed(size: usize, _align: usize) -> *mut u8 {
unsafe { libc::calloc(size as libc::size_t, 1) as *mut u8 }
}
#[no_mangle]
pub extern fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
unsafe {
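Because `__rust_allocate_zeroed` is now declared in the allocator extern block near the top of this commit, every crate that provides these symbols, including the test allocators touched here, has to export the new one as well. For an allocator with no native calloc-style call, a hypothetical fallback (not part of this commit) can simply zero the block itself:

#[no_mangle]
pub extern fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 {
    // Reuse the ordinary allocation path, then clear the block by hand.
    let ptr = __rust_allocate(size, align);
    if !ptr.is_null() {
        unsafe { core::ptr::write_bytes(ptr, 0, size) };
    }
    ptr
}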