remove type descriptors from proc and @T

This also drops support for the managed pointer POISON_ON_FREE feature,
as it is not worth adding back support for it. The leftovers can be
removed after a snapshot.
Daniel Micay 2014-02-05 23:05:30 -05:00
parent 56565eb129
commit 940d1ae2f3
10 changed files with 171 additions and 42 deletions
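
In essence, every managed box and proc environment used to carry a
pointer to a static TyDesc (size, alignment, drop glue) in its second
header word; after this commit that word holds the drop-glue function
pointer itself, and size and alignment travel as explicit arguments to
the allocators. A side-by-side sketch of the header, using the field
names from the raw::Box<T> change at the bottom of this diff:

    // before: drop glue reached through a type-descriptor indirection
    pub struct Box<T> {
        ref_count: uint,
        type_desc: *TyDesc,
        prev: *mut Box<T>,
        next: *mut Box<T>,
        data: T
    }

    // after: the monomorphized glue is stored directly and is called
    // with a pointer to `data`
    pub struct Box<T> {
        ref_count: uint,
        drop_glue: fn(ptr: *mut u8),
        prev: *mut Box<T>,
        next: *mut Box<T>,
        data: T
    }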


@@ -352,7 +352,6 @@ pub fn malloc_raw_dyn<'a>(
     if heap == heap_exchange {
         let llty_value = type_of::type_of(ccx, t);
         // Allocate space:
         let r = callee::trans_lang_call(
             bcx,
@@ -375,17 +374,18 @@ pub fn malloc_raw_dyn<'a>(
         // Grab the TypeRef type of box_ptr_ty.
         let box_ptr_ty = ty::mk_box(bcx.tcx(), t);
         let llty = type_of(ccx, box_ptr_ty);
+        let llalign = C_uint(ccx, llalign_of_min(ccx, llty) as uint);
         // Get the tydesc for the body:
         let static_ti = get_tydesc(ccx, t);
         glue::lazily_emit_tydesc_glue(ccx, abi::tydesc_field_drop_glue, static_ti);
         // Allocate space:
-        let tydesc = PointerCast(bcx, static_ti.tydesc, Type::i8p());
+        let drop_glue = static_ti.drop_glue.get().unwrap();
         let r = callee::trans_lang_call(
             bcx,
             langcall,
-            [tydesc, size],
+            [PointerCast(bcx, drop_glue, Type::glue_fn(Type::i8p()).ptr_to()), size, llalign],
             None);
         rslt(r.bcx, PointerCast(r.bcx, r.val, llty))
     }
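
The drop glue being passed to the lang call is the compiler-generated
function that runs a concrete type's destructor; it replaces the
tydesc argument, and `llalign` is threaded through because the body's
alignment used to be read out of the TyDesc instead. A hand-written
sketch of what the emitted glue amounts to for some type T (the name
and body are illustrative, not code from the tree):

    // conceptually emitted once per type T that needs dropping
    unsafe fn drop_glue_for_T(body: *mut u8) {
        // move the value out of the box body; when `_v` goes out of
        // scope at the end of this function, T's destructor runs
        let _v = ptr::read_ptr(body as *T);
    }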


@@ -1779,7 +1779,7 @@ fn boxed_type_metadata(cx: &CrateContext,
             offset: ComputedMemberOffset,
         },
         MemberDescription {
-            name: ~"tydesc",
+            name: ~"drop_glue",
             llvm_type: member_llvm_types[1],
             type_metadata: nil_pointer_type_metadata,
             offset: ComputedMemberOffset,
@@ -1824,7 +1824,7 @@ fn boxed_type_metadata(cx: &CrateContext,
                                     -> bool {
         member_llvm_types.len() == 5 &&
         member_llvm_types[0] == cx.int_type &&
-        member_llvm_types[1] == cx.tydesc_type.ptr_to() &&
+        member_llvm_types[1] == Type::generic_glue_fn(cx).ptr_to() &&
         member_llvm_types[2] == Type::i8().ptr_to() &&
         member_llvm_types[3] == Type::i8().ptr_to() &&
         member_llvm_types[4] == content_llvm_type


@@ -355,9 +355,9 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'a> {
             let lluniquevalue = GEPi(bcx, v0, [0, abi::trt_field_box]);
             // Only drop the value when it is non-null
             with_cond(bcx, IsNotNull(bcx, Load(bcx, lluniquevalue)), |bcx| {
-                let lldtor_ptr = Load(bcx, GEPi(bcx, v0, [0, abi::trt_field_vtable]));
-                let lldtor = Load(bcx, lldtor_ptr);
-                Call(bcx, lldtor, [PointerCast(bcx, lluniquevalue, Type::i8p())], []);
+                let dtor_ptr = Load(bcx, GEPi(bcx, v0, [0, abi::trt_field_vtable]));
+                let dtor = Load(bcx, dtor_ptr);
+                Call(bcx, dtor, [PointerCast(bcx, lluniquevalue, Type::i8p())], []);
                 bcx
             })
         }
@@ -367,18 +367,12 @@ fn make_drop_glue<'a>(bcx: &'a Block<'a>, v0: ValueRef, t: ty::t) -> &'a Block<'a> {
             let env_ptr_ty = Type::at_box(ccx, Type::i8()).ptr_to();
             let env = PointerCast(bcx, env, env_ptr_ty);
             with_cond(bcx, IsNotNull(bcx, env), |bcx| {
-                // Load the type descr found in the env
-                let lltydescty = ccx.tydesc_type.ptr_to();
-                let tydescptr = GEPi(bcx, env, [0u, abi::box_field_tydesc]);
-                let tydesc = Load(bcx, tydescptr);
-                let tydesc = PointerCast(bcx, tydesc, lltydescty);
-                // Drop the tuple data then free the descriptor
+                let dtor_ptr = GEPi(bcx, env, [0u, abi::box_field_tydesc]);
+                let dtor = Load(bcx, dtor_ptr);
                 let cdata = GEPi(bcx, env, [0u, abi::box_field_body]);
-                call_tydesc_glue_full(bcx, cdata, tydesc,
-                                      abi::tydesc_field_drop_glue, None);
+                Call(bcx, dtor, [PointerCast(bcx, cdata, Type::i8p())], []);
-                // Free the ty descr (if necc) and the env itself
+                // Free the environment itself
                 trans_exchange_free(bcx, env)
             })
         }
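
With the descriptor gone, tearing down a proc environment is just two
loads and an indirect call, mirroring the trait-object path above. The
generated IR boils down to something like this sketch (a hypothetical
helper written against the new raw::Box layout; `exchange_free` stands
in for the runtime's exchange-heap free):

    unsafe fn drop_proc_env(env: *mut raw::Box<()>) {
        let dtor = (*env).drop_glue;             // header word 1
        let body = &mut (*env).data as *mut ();
        dtor(body as *mut u8);                   // run the env's drop glue
        exchange_free(env as *u8);               // then free the allocation
    }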


@@ -238,7 +238,7 @@ impl Type {
     // The box pointed to by @T.
     pub fn at_box(ctx: &CrateContext, ty: Type) -> Type {
         Type::struct_([
-            ctx.int_type, ctx.tydesc_type.ptr_to(),
+            ctx.int_type, Type::glue_fn(Type::i8p()).ptr_to(),
             Type::i8p(), Type::i8p(), ty
         ], false)
     }
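
The five members of this LLVM struct correspond one-to-one to the
fields of raw::Box<T> at the end of this commit; restated as an
annotated layout (commentary, not code from the tree):

    // word 0: int          ref_count
    // word 1: fn(*mut u8)* drop_glue   (previously *TyDesc)
    // word 2: i8*          prev        \ intrusive links in the task's
    // word 3: i8*          next        / live-allocations list
    // then:   ty           data        (the box body)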


@@ -11,11 +11,8 @@
 #[doc(hidden)];
 use ptr;
-use unstable::intrinsics::TyDesc;
 use unstable::raw;
-type DropGlue<'a> = 'a |**TyDesc, *u8|;
 static RC_IMMORTAL : uint = 0x77777777;
@@ -24,11 +21,6 @@ static RC_IMMORTAL : uint = 0x77777777;
  * This runs at task death to free all boxes.
  */
-struct AnnihilateStats {
-    n_total_boxes: uint,
-    n_bytes_freed: uint
-}
 unsafe fn each_live_alloc(read_next_before: bool,
                           f: |alloc: *mut raw::Box<()>| -> bool)
                           -> bool {
@@ -65,21 +57,18 @@ fn debug_mem() -> bool {
 }
 /// Destroys all managed memory (i.e. @ boxes) held by the current task.
+#[cfg(stage0)]
 pub unsafe fn annihilate() {
     use rt::local_heap::local_free;
-    use mem;
-    let mut stats = AnnihilateStats {
-        n_total_boxes: 0,
-        n_bytes_freed: 0
-    };
+    let mut n_total_boxes = 0u;
     // Pass 1: Make all boxes immortal.
     //
     // In this pass, nothing gets freed, so it does not matter whether
     // we read the next field before or after the callback.
     each_live_alloc(true, |alloc| {
-        stats.n_total_boxes += 1;
+        n_total_boxes += 1;
         (*alloc).ref_count = RC_IMMORTAL;
         true
     });
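
Why pass 1 pins every box first: a destructor run in pass 2 can drop
`@` references of its own, decrementing other boxes' reference counts;
if one of those counts hit zero mid-walk, the box would be freed while
still linked into the list the annihilator is iterating. Setting every
count to RC_IMMORTAL makes those decrements harmless, and pass 3 then
reclaims the memory unconditionally. A toy cycle that would otherwise
trigger the hazard (illustrative type, not from the tree):

    struct Node {
        peer: Option<@Node>
    }
    // two Nodes whose `peer` fields point at each other keep each
    // other alive; only the annihilator's passes reclaim them
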
@@ -103,18 +92,58 @@ pub unsafe fn annihilate() {
     // left), so we must read the `next` field before, since it will
     // not be valid after.
     each_live_alloc(true, |alloc| {
-        stats.n_bytes_freed +=
-            (*((*alloc).type_desc)).size
-            + mem::size_of::<raw::Box<()>>();
         local_free(alloc as *u8);
         true
     });
     if debug_mem() {
         // We do logging here w/o allocation.
-        debug!("annihilator stats:\n \
-                total boxes: {}\n \
-                bytes freed: {}",
-               stats.n_total_boxes, stats.n_bytes_freed);
+        debug!("total boxes annihilated: {}", n_total_boxes);
     }
 }
+/// Destroys all managed memory (i.e. @ boxes) held by the current task.
+#[cfg(not(stage0))]
+pub unsafe fn annihilate() {
+    use rt::local_heap::local_free;
+    let mut n_total_boxes = 0u;
+    // Pass 1: Make all boxes immortal.
+    //
+    // In this pass, nothing gets freed, so it does not matter whether
+    // we read the next field before or after the callback.
+    each_live_alloc(true, |alloc| {
+        n_total_boxes += 1;
+        (*alloc).ref_count = RC_IMMORTAL;
+        true
+    });
+    // Pass 2: Drop all boxes.
+    //
+    // In this pass, unique-managed boxes may get freed, but not
+    // managed boxes, so we must read the `next` field *after* the
+    // callback, as the original value may have been freed.
+    each_live_alloc(false, |alloc| {
+        let drop_glue = (*alloc).drop_glue;
+        let data = &mut (*alloc).data as *mut ();
+        drop_glue(data as *mut u8);
+        true
+    });
+    // Pass 3: Free all boxes.
+    //
+    // In this pass, managed boxes may get freed (but not
+    // unique-managed boxes, though I think that none of those are
+    // left), so we must read the `next` field before, since it will
+    // not be valid after.
+    each_live_alloc(true, |alloc| {
+        local_free(alloc as *u8);
+        true
+    });
+    if debug_mem() {
+        // We do logging here w/o allocation.
+        debug!("total boxes annihilated: {}", n_total_boxes);
+    }
+}
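
This file also introduces the bootstrapping pattern that recurs below:
each affected function exists twice, once under #[cfg(stage0)] with
the old TyDesc-based signature for the snapshot compiler, and once
under #[cfg(not(stage0))] with the new drop-glue signature. Only one
of the two survives any given build, and once a new snapshot is cut
the stage0 copies are the "leftovers" the commit message says can be
removed. The skeleton:

    #[cfg(stage0)]      // compiled when building with the old snapshot
    pub unsafe fn annihilate() { /* TyDesc-based passes */ }

    #[cfg(not(stage0))] // compiled by the freshly built compiler
    pub unsafe fn annihilate() { /* drop_glue-based passes, as above */ }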


@@ -10,6 +10,8 @@
 //! Runtime environment settings
+// NOTE: remove `POISON_ON_FREE` after a snapshot
 use from_str::from_str;
 use option::{Some, None};
 use os;
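
For context, POISON_ON_FREE was a debugging switch that made the local
heap scribble over a managed box's memory as it was freed, so that
use-after-free of @-data failed loudly rather than silently. Porting
it would be awkward now that a box's size can no longer be recovered
from a TyDesc at free time, which is presumably why the commit drops
it instead. A hedged sketch of the old behaviour (the helper name and
fill byte are invented):

    unsafe fn poison_on_free(alloc: *mut u8, total_size: uint) {
        // overwrite the dead box so stale @-pointers read garbage
        ptr::set_memory(alloc, 0xab, total_size);
    }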


@@ -10,7 +10,9 @@
 use libc::{c_void, size_t, free, malloc, realloc};
 use ptr::{RawPtr, mut_null};
-use unstable::intrinsics::{TyDesc, abort};
+#[cfg(stage0)]
+use unstable::intrinsics::TyDesc;
+use unstable::intrinsics::abort;
 use unstable::raw;
 use mem::size_of;
@@ -73,14 +75,23 @@ pub unsafe fn exchange_malloc(size: uint) -> *u8 {
 }
 // FIXME: #7496
-#[cfg(not(test))]
+#[cfg(not(test), stage0)]
 #[lang="closure_exchange_malloc"]
 #[inline]
 pub unsafe fn closure_exchange_malloc_(td: *u8, size: uint) -> *u8 {
     closure_exchange_malloc(td, size)
 }
+// FIXME: #7496
+#[cfg(not(test), not(stage0))]
+#[lang="closure_exchange_malloc"]
+#[inline]
+pub unsafe fn closure_exchange_malloc_(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    closure_exchange_malloc(drop_glue, size, align)
+}
 #[inline]
+#[cfg(stage0)]
 pub unsafe fn closure_exchange_malloc(td: *u8, size: uint) -> *u8 {
     let td = td as *TyDesc;
     let size = size;
@@ -96,6 +107,18 @@ pub unsafe fn closure_exchange_malloc(td: *u8, size: uint) -> *u8 {
     alloc as *u8
 }
+#[inline]
+#[cfg(not(stage0))]
+pub unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    let total_size = get_box_size(size, align);
+    let p = malloc_raw(total_size);
+    let alloc = p as *mut raw::Box<()>;
+    (*alloc).drop_glue = drop_glue;
+    alloc as *u8
+}
 // NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
 // inside a landing pad may corrupt the state of the exception handler.
 #[cfg(not(test))]
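
`get_box_size` (already present in this file) is the reason `align`
must now be an explicit parameter: the body starts at the first offset
past the header that satisfies the body's alignment, and the TyDesc
that used to supply that alignment is gone. A sketch of the
computation, assuming power-of-two alignments:

    fn get_box_size(body_size: uint, body_align: uint) -> uint {
        let header = size_of::<raw::Box<()>>();
        // round the header size up to the body's alignment
        let body_offset = (header + body_align - 1) & !(body_align - 1);
        body_offset + body_size
    }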


@@ -21,6 +21,7 @@ use rt::env;
 use rt::global_heap;
 use rt::local::Local;
 use rt::task::Task;
+#[cfg(stage0)]
 use unstable::intrinsics::TyDesc;
 use unstable::raw;
 use vec::ImmutableVector;
@@ -60,6 +61,7 @@ impl LocalHeap {
     }
     #[inline]
+    #[cfg(stage0)]
     pub fn alloc(&mut self, td: *TyDesc, size: uint) -> *mut Box {
         let total_size = global_heap::get_box_size(size, unsafe { (*td).align });
         let alloc = self.memory_region.malloc(total_size);
@@ -80,6 +82,28 @@ impl LocalHeap {
         return alloc;
     }
+    #[inline]
+    #[cfg(not(stage0))]
+    pub fn alloc(&mut self, drop_glue: fn(*mut u8), size: uint, align: uint) -> *mut Box {
+        let total_size = global_heap::get_box_size(size, align);
+        let alloc = self.memory_region.malloc(total_size);
+        {
+            // Make sure that we can't use `mybox` outside of this scope
+            let mybox: &mut Box = unsafe { cast::transmute(alloc) };
+            // Clear out this box, and move it to the front of the live
+            // allocations list
+            mybox.drop_glue = drop_glue;
+            mybox.ref_count = 1;
+            mybox.prev = ptr::mut_null();
+            mybox.next = self.live_allocs;
+            if !self.live_allocs.is_null() {
+                unsafe { (*self.live_allocs).prev = alloc; }
+            }
+            self.live_allocs = alloc;
+        }
+        return alloc;
+    }
     #[inline]
     pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
         // Make sure that we can't use `mybox` outside of this scope
@@ -102,6 +126,7 @@ impl LocalHeap {
     }
     #[inline]
+    #[cfg(stage0)]
     pub fn free(&mut self, alloc: *mut Box) {
         {
             // Make sure that we can't use `mybox` outside of this scope
@@ -133,6 +158,28 @@ impl LocalHeap {
         self.memory_region.free(alloc);
     }
+    #[inline]
+    #[cfg(not(stage0))]
+    pub fn free(&mut self, alloc: *mut Box) {
+        {
+            // Make sure that we can't use `mybox` outside of this scope
+            let mybox: &mut Box = unsafe { cast::transmute(alloc) };
+            // Unlink it from the linked list
+            if !mybox.prev.is_null() {
+                unsafe { (*mybox.prev).next = mybox.next; }
+            }
+            if !mybox.next.is_null() {
+                unsafe { (*mybox.next).prev = mybox.prev; }
+            }
+            if self.live_allocs == alloc {
+                self.live_allocs = mybox.next;
+            }
+        }
+        self.memory_region.free(alloc);
+    }
 }
 impl Drop for LocalHeap {
@@ -292,6 +339,7 @@ impl Drop for MemoryRegion {
 }
 #[inline]
+#[cfg(stage0)]
 pub unsafe fn local_malloc(td: *u8, size: uint) -> *u8 {
     // FIXME: Unsafe borrow for speed. Lame.
     let task: Option<*mut Task> = Local::try_unsafe_borrow();
@@ -303,6 +351,19 @@ pub unsafe fn local_malloc(td: *u8, size: uint) -> *u8 {
     }
 }
+#[inline]
+#[cfg(not(stage0))]
+pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    // FIXME: Unsafe borrow for speed. Lame.
+    let task: Option<*mut Task> = Local::try_unsafe_borrow();
+    match task {
+        Some(task) => {
+            (*task).heap.alloc(drop_glue, size, align) as *u8
+        }
+        None => rtabort!("local malloc outside of task")
+    }
+}
 // A little compatibility function
 #[inline]
 pub unsafe fn local_free(ptr: *u8) {
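
Together with the #[lang="malloc"] shim below, this is the entry point
behind every `@expr` the compiler lowers: it allocates a raw::Box on
the task-local heap, stamps the header, and pushes the box onto the
front of `live_allocs` so the annihilator can find it at task death.
What the generated code now amounts to at an allocation site
(illustrative names; `drop_glue_for_T` is the per-type glue):

    // for `@expr` of type T, the compiler effectively emits:
    let p = local_malloc(drop_glue_for_T,      // was a *TyDesc before
                         size_of::<T>(),       // body size
                         min_align_of::<T>()); // alignment, now explicit
    // ...and then writes the value of `expr` into the body at `p`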


@@ -27,11 +27,19 @@ pub fn fail_bounds_check(file: *u8, line: uint, index: uint, len: uint) -> ! {
 }
 #[lang="malloc"]
+#[cfg(stage0)]
 #[inline]
 pub unsafe fn local_malloc(td: *u8, size: uint) -> *u8 {
     ::rt::local_heap::local_malloc(td, size)
 }
+#[lang="malloc"]
+#[cfg(not(stage0))]
+#[inline]
+pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
+    ::rt::local_heap::local_malloc(drop_glue, size, align)
+}
 // NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
 // inside a landing pad may corrupt the state of the exception handler. If a
 // problem occurs, call exit instead.


@@ -9,9 +9,11 @@
 // except according to those terms.
 use cast;
+#[cfg(stage0)]
 use unstable::intrinsics::TyDesc;
 /// The representation of a Rust managed box
+#[cfg(stage0)]
 pub struct Box<T> {
     ref_count: uint,
     type_desc: *TyDesc,
@@ -20,6 +22,16 @@ pub struct Box<T> {
     data: T
 }
+/// The representation of a Rust managed box
+#[cfg(not(stage0))]
+pub struct Box<T> {
+    ref_count: uint,
+    drop_glue: fn(ptr: *mut u8),
+    prev: *mut Box<T>,
+    next: *mut Box<T>,
+    data: T
+}
 /// The representation of a Rust vector
 pub struct Vec<T> {
     fill: uint,