diff --git a/src/libcore/cleanup.rs b/src/libcore/cleanup.rs
new file mode 100644
index 00000000000..46dad7496da
--- /dev/null
+++ b/src/libcore/cleanup.rs
@@ -0,0 +1,152 @@
+use libc::{c_char, c_void, intptr_t, uintptr_t};
+use ptr::{mut_null, null, to_unsafe_ptr};
+use repr::BoxRepr;
+use rt::rt_free;
+use sys::TypeDesc;
+use unsafe::transmute;
+
+/**
+ * Runtime structures
+ *
+ * NB: These must match the representation in the C++ runtime.
+ */
+
+type DropGlue = fn(**TypeDesc, *c_void);
+type FreeGlue = fn(**TypeDesc, *c_void);
+
+type TaskID = uintptr_t;
+
+struct StackSegment { priv opaque: () }
+struct Scheduler { priv opaque: () }
+struct SchedulerLoop { priv opaque: () }
+struct Kernel { priv opaque: () }
+struct Env { priv opaque: () }
+struct AllocHeader { priv opaque: () }
+struct MemoryRegion { priv opaque: () }
+
+// XXX: i386
+struct Registers {
+    data: [u64 * 22]
+}
+
+struct Context {
+    regs: Registers,
+    next: *Context,
+    pad: u64
+}
+
+struct BoxedRegion {
+    env: *Env,
+    backing_region: *MemoryRegion,
+    live_allocs: *BoxRepr
+}
+
+struct Task {
+    // Public fields
+    refcount: intptr_t,
+    id: TaskID,
+    ctx: Context,
+    stack_segment: *StackSegment,
+    runtime_sp: uintptr_t,
+    scheduler: *Scheduler,
+    scheduler_loop: *SchedulerLoop,
+
+    // Fields known only to the runtime
+    kernel: *Kernel,
+    name: *c_char,
+    list_index: *i32,
+    rendezvous_ptr: *uintptr_t,
+    boxed_region: BoxedRegion
+}
+
+/*
+ * Box annihilation
+ *
+ * This runs at task death to free all boxes.
+ */
+
+/// Destroys all managed memory (i.e. @ boxes) held by the current task.
+pub unsafe fn annihilate() {
+    let task: *Task = transmute(rustrt::rust_get_task());
+
+    // Pass 1: Make all boxes immortal.
+    let box = (*task).boxed_region.live_allocs;
+    let mut box: *mut BoxRepr = transmute(copy box);
+    assert (*box).prev == null();
+    while box != mut_null() {
+        debug!("making box immortal: %x", box as uint);
+        (*box).ref_count = 0x77777777;
+        box = transmute(copy (*box).next);
+    }
+
+    // Pass 2: Drop all boxes.
+    let box = (*task).boxed_region.live_allocs;
+    let mut box: *mut BoxRepr = transmute(copy box);
+    assert (*box).prev == null();
+    while box != mut_null() {
+        debug!("calling drop glue for box: %x", box as uint);
+        let tydesc: *TypeDesc = transmute(copy (*box).type_desc);
+        let drop_glue: DropGlue = transmute(((*tydesc).drop_glue, 0));
+        drop_glue(to_unsafe_ptr(&tydesc), transmute(&(*box).data));
+
+        box = transmute(copy (*box).next);
+    }
+
+    // Pass 3: Free all boxes.
+    loop {
+        let box = (*task).boxed_region.live_allocs;
+        if box == null() { break; }
+        let mut box: *mut BoxRepr = transmute(copy box);
+        assert (*box).prev == null();
+
+        debug!("freeing box: %x", box as uint);
+        rt_free(transmute(box));
+    }
+}
+
+/// Bindings to the runtime
+extern mod rustrt {
+    #[rust_stack]
+    /*priv*/ fn rust_get_task() -> *c_void;
+}
+
+/*
+ * Tests
+ */
+
+#[cfg(test)]
+mod tests {
+    /*struct Knot {
+        mut a: Option<@Knot>
+    }
+
+    struct Blah {
+        x: int,
+        drop { io::println("Blah!"); }
+    }
+
+    #[test]
+    fn test_box_annihilation() {
+        let knot = @Knot { a: None };
+        knot.a = Some(knot);
+
+        let x = @~"foo";
+
+        let blah = @Blah { x: 3 };
+
+        let f_ref = @mut None;
+        let f = || { util::ignore(f_ref); };
+        *f_ref = Some(f);
+
+        unsafe { annihilate(); }
+
+        unsafe {
+            unsafe::forget(knot);
+            unsafe::forget(x);
+            unsafe::forget(blah);
+            unsafe::forget(f_ref);
+            unsafe::forget(f);
+        }
+    }*/
+}
+
diff --git a/src/libcore/core.rc b/src/libcore/core.rc
index d6091801da4..8b5a218bb93 100644
--- a/src/libcore/core.rc
+++ b/src/libcore/core.rc
@@ -244,6 +244,7 @@ mod unicode;
 mod private;
 mod cmath;
 mod stackwalk;
+mod cleanup;
 
 // Local Variables:
 // mode: rust;
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index acb9953b148..82eacaed606 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -9,6 +9,7 @@ export offset;
 export const_offset;
 export mut_offset;
 export null;
+export mut_null;
 export is_null;
 export is_not_null;
 export memcpy;
@@ -98,6 +99,10 @@ unsafe fn position(buf: *T, f: fn(T) -> bool) -> uint {
 #[inline(always)]
 pure fn null() -> *T { unsafe { unsafe::reinterpret_cast(&0u) } }
 
+/// Create an unsafe mutable null pointer
+#[inline(always)]
+pure fn mut_null() -> *mut T { unsafe { unsafe::reinterpret_cast(&0u) } }
+
 /// Returns true if the pointer is equal to the null pointer.
 pure fn is_null(ptr: *const T) -> bool { ptr == null() }
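
For context: the annihilator walks the task's live_allocs list three times. Pass 1 pins every box with a sentinel reference count so nothing is freed while destructors run, pass 2 invokes each box's drop glue, and pass 3 releases the memory via rt_free. The mut_null helper added to ptr.rs is this era's analogue of today's std::ptr::null_mut. The following is a minimal, hypothetical sketch of the same three-pass idea in modern Rust; Node, its fields, and the main driver are illustrative stand-ins for the runtime's BoxRepr/live_allocs structures, not code from this patch.

use std::alloc::{dealloc, Layout};
use std::ptr;

// Simplified stand-in for BoxRepr: an intrusive singly linked list of
// heap allocations, each carrying a reference count and a payload.
struct Node {
    ref_count: usize,
    next: *mut Node,
    data: String, // stand-in for the payload behind the type descriptor
}

unsafe fn annihilate(head: *mut Node) {
    // Pass 1: pin every allocation with a sentinel refcount so destructors
    // running in pass 2 cannot free a box that is still on the list.
    let mut cur = head;
    while !cur.is_null() {
        (*cur).ref_count = 0x7777_7777;
        cur = (*cur).next;
    }

    // Pass 2: run destructors (the drop glue) without releasing any memory.
    let mut cur = head;
    while !cur.is_null() {
        ptr::drop_in_place(ptr::addr_of_mut!((*cur).data));
        cur = (*cur).next;
    }

    // Pass 3: free the allocations themselves, mirroring rt_free.
    let mut cur = head;
    while !cur.is_null() {
        let next = (*cur).next;
        dealloc(cur as *mut u8, Layout::new::<Node>());
        cur = next;
    }
}

fn main() {
    unsafe {
        // Chain two allocations the way the boxed region links live boxes,
        // then annihilate the whole list.
        let second = Box::into_raw(Box::new(Node {
            ref_count: 1,
            next: ptr::null_mut(),
            data: String::from("second"),
        }));
        let first = Box::into_raw(Box::new(Node {
            ref_count: 1,
            next: second,
            data: String::from("first"),
        }));
        annihilate(first);
    }
}

The pass separation matters because drop glue in pass 2 may still reach other managed boxes (the commented-out Knot test builds exactly such a cycle); pinning them first keeps those accesses valid, and the final pass reclaims every allocation regardless of its reference count.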