add an align parameter to exchange_malloc

Closes #13094
This commit is contained in:
Daniel Micay 2014-04-25 21:24:51 -04:00
parent 1b1ca6d546
commit 03a5eb4b52
6 changed files with 101 additions and 15 deletions

View File

@ -33,6 +33,7 @@ use std::cmp;
use std::intrinsics::{TyDesc, get_tydesc};
use std::intrinsics;
use std::mem;
use std::mem::min_align_of;
use std::num;
use std::ptr::read;
use std::rc::Rc;
@ -204,7 +205,7 @@ impl Arena {
#[inline]
fn alloc_copy<'a, T>(&'a mut self, op: || -> T) -> &'a T {
unsafe {
let ptr = self.alloc_copy_inner(mem::size_of::<T>(), mem::min_align_of::<T>());
let ptr = self.alloc_copy_inner(mem::size_of::<T>(), min_align_of::<T>());
let ptr: *mut T = transmute(ptr);
mem::move_val_init(&mut (*ptr), op());
return transmute(ptr);
@ -261,7 +262,7 @@ impl Arena {
unsafe {
let tydesc = get_tydesc::<T>();
let (ty_ptr, ptr) =
self.alloc_noncopy_inner(mem::size_of::<T>(), mem::min_align_of::<T>());
self.alloc_noncopy_inner(mem::size_of::<T>(), min_align_of::<T>());
let ty_ptr: *mut uint = transmute(ty_ptr);
let ptr: *mut T = transmute(ptr);
// Write in our tydesc along with a bit indicating that it
@ -353,7 +354,29 @@ struct TypedArenaChunk<T> {
}
impl<T> TypedArenaChunk<T> {
/// Allocates a new chunk with room for `capacity` elements of `T`,
/// linked in front of `next`.
///
/// stage0 bootstrap variant: calls the one-argument
/// `exchange_malloc`, which has no alignment parameter.
#[cfg(stage0)]
#[inline]
fn new(next: Option<Box<TypedArenaChunk<T>>>, capacity: uint)
-> Box<TypedArenaChunk<T>> {
// Header size, rounded up so the element storage that follows the
// header is suitably aligned for `T`.
let mut size = mem::size_of::<TypedArenaChunk<T>>();
size = round_up(size, min_align_of::<T>());
// checked_mul/checked_add: fail loudly on arithmetic overflow
// instead of silently under-allocating.
let elem_size = mem::size_of::<T>();
let elems_size = elem_size.checked_mul(&capacity).unwrap();
size = size.checked_add(&elems_size).unwrap();
let mut chunk = unsafe {
let chunk = global_heap::exchange_malloc(size);
let mut chunk: Box<TypedArenaChunk<T>> = cast::transmute(chunk);
// The allocation is uninitialized: move `next` into place rather
// than assigning, so no garbage "old" value gets dropped.
mem::move_val_init(&mut chunk.next, next);
chunk
};
chunk.capacity = capacity;
chunk
}
#[inline]
#[cfg(not(stage0))]
fn new(next: Option<Box<TypedArenaChunk<T>>>, capacity: uint)
-> Box<TypedArenaChunk<T>> {
let mut size = mem::size_of::<TypedArenaChunk<T>>();
@ -363,7 +386,7 @@ impl<T> TypedArenaChunk<T> {
size = size.checked_add(&elems_size).unwrap();
let mut chunk = unsafe {
let chunk = global_heap::exchange_malloc(size);
let chunk = global_heap::exchange_malloc(size, min_align_of::<TypedArenaChunk<T>>());
let mut chunk: Box<TypedArenaChunk<T>> = cast::transmute(chunk);
mem::move_val_init(&mut chunk.next, next);
chunk
@ -402,7 +425,7 @@ impl<T> TypedArenaChunk<T> {
fn start(&self) -> *u8 {
let this: *TypedArenaChunk<T> = self;
unsafe {
cast::transmute(round_up(this.offset(1) as uint, mem::min_align_of::<T>()))
cast::transmute(round_up(this.offset(1) as uint, min_align_of::<T>()))
}
}

View File

@ -29,13 +29,29 @@ use str::StrSlice;
// Foreign allocator entry points. `rust_malloc`/`rust_free` are the
// `#[no_mangle]` C-ABI shims exported by the standard library; the
// stage0 bootstrap compiler still links the one-argument `rust_malloc`.
#[allow(ctypes)]
extern {
// NOTE(review): `malloc`/`free` appear superseded by the
// `rust_malloc`/`rust_free` pair below — confirm remaining callers
// before keeping these declarations.
fn malloc(size: uint) -> *u8;
fn free(ptr: *u8);
#[cfg(stage0)]
fn rust_malloc(size: uint) -> *u8;
#[cfg(not(stage0))]
fn rust_malloc(size: uint, align: uint) -> *u8;
fn rust_free(ptr: *u8);
}
/// stage0 variant: allocates backing storage for a `Vec<()>` header plus
/// `cap` bytes of element storage via the libstd-exported `rust_malloc`.
///
/// Fix: the body previously contained two shadowed `let ret` bindings —
/// a stale `malloc(cap)` call followed by the `rust_malloc(cap)` call —
/// so the first allocation was leaked and never freed. Only the
/// `rust_malloc` allocation is kept.
#[cfg(stage0)]
unsafe fn alloc(cap: uint) -> *mut Vec<()> {
// Reserve room for the header in front of the elements; unwrap()
// aborts on overflow rather than under-allocating.
let cap = cap.checked_add(&mem::size_of::<Vec<()>>()).unwrap();
let ret = rust_malloc(cap) as *mut Vec<()>;
if ret.is_null() {
intrinsics::abort();
}
(*ret).fill = 0;
(*ret).alloc = cap;
ret
}
#[cfg(not(stage0))]
unsafe fn alloc(cap: uint) -> *mut Vec<()> {
let cap = cap.checked_add(&mem::size_of::<Vec<()>>()).unwrap();
let ret = rust_malloc(cap, 8) as *mut Vec<()>;
if ret.is_null() {
intrinsics::abort();
}
@ -102,7 +118,7 @@ impl FromIterator<char> for ~str {
ptr::copy_nonoverlapping_memory(&mut (*ptr2).data,
&(*ptr).data,
len);
free(ptr as *u8);
rust_free(ptr as *u8);
cast::forget(ret);
ret = cast::transmute(ptr2);
ptr = ptr2;
@ -172,7 +188,7 @@ impl<A: Clone> Clone for ~[A] {
for j in range(0, *i as int) {
ptr::read(&*p.offset(j));
}
free(ret as *u8);
rust_free(ret as *u8);
});
cast::transmute(ret)
}

View File

@ -345,7 +345,8 @@ fn require_alloc_fn(bcx: &Block, info_ty: ty::t, it: LangItem) -> ast::DefId {
pub fn malloc_raw_dyn<'a>(bcx: &'a Block<'a>,
ptr_ty: ty::t,
size: ValueRef)
size: ValueRef,
align: ValueRef)
-> Result<'a> {
let _icx = push_ctxt("malloc_raw_exchange");
let ccx = bcx.ccx();
@ -353,7 +354,7 @@ pub fn malloc_raw_dyn<'a>(bcx: &'a Block<'a>,
// Allocate space:
let r = callee::trans_lang_call(bcx,
require_alloc_fn(bcx, ptr_ty, ExchangeMallocFnLangItem),
[size],
[size, align],
None);
let llty_ptr = type_of::type_of(ccx, ptr_ty);

View File

@ -67,7 +67,7 @@ use middle::typeck::MethodCall;
use util::common::indenter;
use util::ppaux::Repr;
use util::nodemap::NodeMap;
use middle::trans::machine::{llsize_of, llsize_of_alloc};
use middle::trans::machine::{llalign_of_min, llsize_of, llsize_of_alloc};
use middle::trans::type_::Type;
use syntax::ast;
@ -1170,10 +1170,11 @@ fn trans_uniq_expr<'a>(bcx: &'a Block<'a>,
let fcx = bcx.fcx;
let llty = type_of::type_of(bcx.ccx(), contents_ty);
let size = llsize_of(bcx.ccx(), llty);
let align = C_uint(bcx.ccx(), llalign_of_min(bcx.ccx(), llty) as uint);
// We need to a make a pointer type because box_ty is ty_bot
// if content_ty is, e.g. box fail!().
let real_box_ty = ty::mk_uniq(bcx.tcx(), contents_ty);
let Result { bcx, val } = malloc_raw_dyn(bcx, real_box_ty, size);
let Result { bcx, val } = malloc_raw_dyn(bcx, real_box_ty, size, align);
// Unique boxes do not allocate for zero-size types. The standard library
// may assume that `free` is never called on the pointer returned for
// `Box<ZeroSizeType>`.

View File

@ -278,7 +278,9 @@ pub fn trans_uniq_vstore<'a>(bcx: &'a Block<'a>,
let vecsize = Add(bcx, alloc, llsize_of(ccx, ccx.opaque_vec_type));
let Result { bcx: bcx, val: val } = malloc_raw_dyn(bcx, vec_ty, vecsize);
// ~[T] is not going to be changed to support alignment, since it's obsolete.
let align = C_uint(ccx, 8);
let Result { bcx: bcx, val: val } = malloc_raw_dyn(bcx, vec_ty, vecsize, align);
Store(bcx, fill, GEPi(bcx, val, [0u, abi::vec_elt_fill]));
Store(bcx, alloc, GEPi(bcx, val, [0u, abi::vec_elt_alloc]));

View File

@ -68,7 +68,7 @@ pub unsafe fn realloc_raw(ptr: *mut u8, size: uint) -> *mut u8 {
}
/// The allocator for unique pointers without contained managed pointers.
#[cfg(not(test))]
#[cfg(not(test), stage0)]
#[lang="exchange_malloc"]
#[inline]
pub unsafe fn exchange_malloc(size: uint) -> *mut u8 {
@ -85,6 +85,23 @@ pub unsafe fn exchange_malloc(size: uint) -> *mut u8 {
}
}
/// The allocator for unique pointers without contained managed pointers.
///
/// `_align` is accepted to match the compiler's lang-item call signature;
/// this allocator does not yet act on it.
#[cfg(not(test), not(stage0))]
#[lang="exchange_malloc"]
#[inline]
pub unsafe fn exchange_malloc(size: uint, _align: uint) -> *mut u8 {
// Zero-size allocations must not be null (enums assume types such as
// unique pointers are never null), and the compiler never calls
// `exchange_free` on a ~ZeroSizeType, so every zero-size allocation
// can share this single `static`.
static EMPTY: () = ();
if size != 0 {
malloc_raw(size)
} else {
&EMPTY as *() as *mut u8
}
}
// FIXME: #7496
#[cfg(not(test))]
#[lang="closure_exchange_malloc"]
@ -118,6 +135,32 @@ pub unsafe fn exchange_free(ptr: *u8) {
free(ptr as *mut c_void);
}
// Hack for libcore: a `#[no_mangle]` C-ABI symbol forwarding to
// `exchange_malloc` (stage0 bootstrap: one-argument signature).
#[no_mangle]
#[doc(hidden)]
#[deprecated]
#[cfg(stage0)]
pub extern "C" fn rust_malloc(size: uint) -> *mut u8 {
unsafe { exchange_malloc(size) }
}
// Hack for libcore: the post-stage0 form of the `rust_malloc` shim,
// which also forwards the requested alignment to `exchange_malloc`.
#[no_mangle]
#[doc(hidden)]
#[deprecated]
#[cfg(not(stage0))]
pub extern "C" fn rust_malloc(size: uint, align: uint) -> *mut u8 {
unsafe { exchange_malloc(size, align) }
}
// Hack for libcore: a `#[no_mangle]` C-ABI symbol forwarding to
// `exchange_free`.
#[no_mangle]
#[doc(hidden)]
#[deprecated]
pub extern "C" fn rust_free(ptr: *u8) {
unsafe { exchange_free(ptr) }
}
#[cfg(test)]
mod bench {
extern crate test;