From 0e885e42b1292fe2592488a52c35d54b9fe50fb8 Mon Sep 17 00:00:00 2001
From: Daniel Micay
Date: Tue, 14 Jan 2014 02:46:58 -0500
Subject: [PATCH] remove reference counting headers from ~

Unique pointers and vectors currently contain a reference counting
header when they contain a managed pointer. This `{ ref_count,
type_desc, prev, next }` header is not necessary and is not a sensible
foundation for tracing. It adds needless complexity to library code and
is responsible for breakage in places where the branch has been left
out.

The `borrow_offset` field can now be removed from `TyDesc`, along with
the associated handling in the compiler.

Closes #9510
Closes #11533
---
 src/librustc/middle/trans/_match.rs       | 14 +--
 src/librustc/middle/trans/base.rs         | 27 +----
 src/librustc/middle/trans/closure.rs      | 10 +-
 src/librustc/middle/trans/common.rs       |  3 +-
 src/librustc/middle/trans/datum.rs        |  7 +-
 src/librustc/middle/trans/debuginfo.rs    | 12 +--
 src/librustc/middle/trans/expr.rs         |  8 +-
 src/librustc/middle/trans/glue.rs         |  7 +-
 src/librustc/middle/trans/reflect.rs      | 12 +--
 src/librustc/middle/trans/tvec.rs         | 14 ++-
 src/librustc/middle/trans/type_.rs        |  6 +-
 src/librustc/middle/trans/type_of.rs      | 14 +--
 src/libstd/rc.rs                          |  8 ++
 src/libstd/reflect.rs                     |  2 +
 src/libstd/repr.rs                        |  2 +
 src/libstd/unstable/intrinsics.rs         |  2 +
 src/libstd/vec.rs                         | 99 ++++++++++++++++++-
 .../managed-pointer-within-unique-vec.rs  |  8 +-
 .../managed-pointer-within-unique.rs      | 18 ++--
 src/test/debug-info/recursive-struct.rs   |  4 +-
 src/test/run-pass/reflect-visit-data.rs   | 16 ---
 src/test/run-pass/reflect-visit-type.rs   |  7 --
 22 files changed, 152 insertions(+), 148 deletions(-)

diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs
index aa2bd656b90..a7924946ed1 100644
--- a/src/librustc/middle/trans/_match.rs
+++ b/src/librustc/middle/trans/_match.rs
@@ -1584,14 +1584,9 @@ fn compile_submatch_continue<'r,
     }
 
     if any_uniq_pat(m, col) {
-        let pat_ty = node_id_type(bcx, pat_id);
         let llbox = Load(bcx, val);
-        let unboxed = match ty::get(pat_ty).sty {
-            ty::ty_uniq(..) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
-            _ => GEPi(bcx, llbox, [0u, abi::box_field_body])
-        };
         compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
-                         vec::append(~[unboxed], vals_left), chk);
+                         vec::append(~[llbox], vals_left), chk);
         return;
     }
 
@@ -2231,13 +2226,8 @@ fn bind_irrefutable_pat<'a>(
             }
         }
         ast::PatUniq(inner) => {
-            let pat_ty = node_id_type(bcx, pat.id);
             let llbox = Load(bcx, val);
-            let unboxed = match ty::get(pat_ty).sty {
-                ty::ty_uniq(..)
if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox, - _ => GEPi(bcx, llbox, [0u, abi::box_field_body]) - }; - bcx = bind_irrefutable_pat(bcx, inner, unboxed, binding_mode); + bcx = bind_irrefutable_pat(bcx, inner, llbox, binding_mode); } ast::PatRegion(inner) => { let loaded_val = Load(bcx, val); diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 5e189bb2ab2..a7e5dfcf67b 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -358,7 +358,7 @@ pub fn malloc_raw_dyn<'a>( } else { // we treat ~fn, @fn and @[] as @ here, which isn't ideal let langcall = match heap { - heap_managed | heap_managed_unique => { + heap_managed => { require_alloc_fn(bcx, t, MallocFnLangItem) } heap_exchange_closure => { @@ -382,9 +382,7 @@ pub fn malloc_raw_dyn<'a>( langcall, [tydesc, size], None); - let r = rslt(r.bcx, PointerCast(r.bcx, r.val, llty)); - maybe_set_managed_unique_rc(r.bcx, r.val, heap); - r + rslt(r.bcx, PointerCast(r.bcx, r.val, llty)) } } @@ -431,27 +429,6 @@ pub fn malloc_general<'a>(bcx: &'a Block, t: ty::t, heap: heap) malloc_general_dyn(bcx, t, heap, llsize_of(bcx.ccx(), ty)) } -pub fn heap_for_unique(bcx: &Block, t: ty::t) -> heap { - if ty::type_contents(bcx.tcx(), t).owns_managed() { - heap_managed_unique - } else { - heap_exchange - } -} - -pub fn maybe_set_managed_unique_rc(bcx: &Block, bx: ValueRef, heap: heap) { - assert!(heap != heap_exchange); - if heap == heap_managed_unique { - // In cases where we are looking at a unique-typed allocation in the - // managed heap (thus have refcount 1 from the managed allocator), - // such as a ~(@foo) or such. These need to have their refcount forced - // to -2 so the annihilator ignores them. - let rc = GEPi(bcx, bx, [0u, abi::box_field_refcnt]); - let rc_val = C_int(bcx.ccx(), -2); - Store(bcx, rc_val, rc); - } -} - // Type descriptor and type glue stuff pub fn get_tydesc_simple(ccx: &CrateContext, t: ty::t) -> ValueRef { diff --git a/src/librustc/middle/trans/closure.rs b/src/librustc/middle/trans/closure.rs index d371637e1a7..f3d061f70b1 100644 --- a/src/librustc/middle/trans/closure.rs +++ b/src/librustc/middle/trans/closure.rs @@ -150,14 +150,6 @@ pub fn mk_closure_tys(tcx: ty::ctxt, return cdata_ty; } -fn heap_for_unique_closure(bcx: &Block, t: ty::t) -> heap { - if ty::type_contents(bcx.tcx(), t).owns_managed() { - heap_managed_unique - } else { - heap_exchange_closure - } -} - pub fn allocate_cbox<'a>( bcx: &'a Block<'a>, sigil: ast::Sigil, @@ -173,7 +165,7 @@ pub fn allocate_cbox<'a>( tcx.sess.bug("trying to trans allocation of @fn") } ast::OwnedSigil => { - malloc_raw(bcx, cdata_ty, heap_for_unique_closure(bcx, cdata_ty)) + malloc_raw(bcx, cdata_ty, heap_exchange_closure) } ast::BorrowedSigil => { let cbox_ty = tuplify_box_ty(tcx, cdata_ty); diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 7f6ab2dd9d1..fb0b241b5fc 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -316,7 +316,6 @@ pub fn warn_not_to_commit(ccx: &CrateContext, msg: &str) { #[deriving(Eq)] pub enum heap { heap_managed, - heap_managed_unique, heap_exchange, heap_exchange_closure } @@ -498,7 +497,7 @@ pub fn add_clean_temp_mem_in_scope_(bcx: &Block, scope_id: Option, pub fn add_clean_free(cx: &Block, ptr: ValueRef, heap: heap) { let free_fn = match heap { - heap_managed | heap_managed_unique => { + heap_managed => { @GCHeapFreeingCleanupFunction { ptr: ptr, } as @CleanupFunction diff --git 
a/src/librustc/middle/trans/datum.rs b/src/librustc/middle/trans/datum.rs index 35414162a27..c2591beac4a 100644 --- a/src/librustc/middle/trans/datum.rs +++ b/src/librustc/middle/trans/datum.rs @@ -570,11 +570,6 @@ impl Datum { let (content_ty, header) = match ty::get(self.ty).sty { ty::ty_box(typ) => (typ, true), ty::ty_uniq(typ) => (typ, false), - ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) => { - let unit_ty = ty::sequence_element_type(bcx.tcx(), self.ty); - let unboxed_vec_ty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty); - (unboxed_vec_ty, true) - } _ => { bcx.tcx().sess.bug(format!( "box_body() invoked on non-box type {}", @@ -582,7 +577,7 @@ impl Datum { } }; - if !header && !ty::type_contents(bcx.tcx(), content_ty).owns_managed() { + if !header { let ptr = self.to_value_llval(bcx); let ty = type_of::type_of(bcx.ccx(), content_ty); let body = PointerCast(bcx, ptr, ty.ptr_to()); diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index 97bcf92fcc9..4e8b3c78dc8 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -2147,10 +2147,6 @@ fn type_metadata(cx: &CrateContext, ty::vstore_fixed(len) => { fixed_vec_metadata(cx, mt.ty, len, usage_site_span) } - ty::vstore_uniq if ty::type_contents(cx.tcx, mt.ty).owns_managed() => { - let boxed_vec_metadata = boxed_vec_metadata(cx, mt.ty, usage_site_span); - pointer_type_metadata(cx, t, boxed_vec_metadata) - } ty::vstore_uniq => { let vec_metadata = vec_metadata(cx, mt.ty, usage_site_span); pointer_type_metadata(cx, t, vec_metadata) @@ -2165,12 +2161,8 @@ fn type_metadata(cx: &CrateContext, } }, ty::ty_uniq(typ) => { - if ty::type_contents(cx.tcx, typ).owns_managed() { - create_pointer_to_box_metadata(cx, t, typ) - } else { - let pointee = type_metadata(cx, typ, usage_site_span); - pointer_type_metadata(cx, t, pointee) - } + let pointee = type_metadata(cx, typ, usage_site_span); + pointer_type_metadata(cx, t, pointee) } ty::ty_ptr(ref mt) | ty::ty_rptr(_, ref mt) => { let pointee = type_metadata(cx, mt.ty, usage_site_span); diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index 8d3b953af18..7fb66c521b1 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -608,8 +608,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) expr, contents); } ast::ExprVstore(contents, ast::ExprVstoreUniq) => { - let heap = heap_for_unique(bcx, expr_ty(bcx, contents)); - return tvec::trans_uniq_or_managed_vstore(bcx, heap, + return tvec::trans_uniq_or_managed_vstore(bcx, heap_exchange, expr, contents); } ast::ExprBox(_, contents) => { @@ -617,7 +616,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr) // `trans_rvalue_dps_unadjusted`.) 
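// Context for the `~` allocation changes above: before this patch, a `~T`
// whose contents owned managed (`@`) pointers carried the
// `{ ref_count, type_desc, prev, next }` header named in the commit message,
// its body was reached through GEPi(.., abi::box_field_body), and its
// refcount was forced to -2 so the annihilator would skip it (see the removed
// maybe_set_managed_unique_rc in base.rs). After the patch every `~T` is a
// plain exchange-heap allocation. A rough sketch of the two layouts; the
// struct names here are made up for illustration and are not the libstd
// definitions:
struct OldHeaderedUnique<T> {
    ref_count: uint,                  // forced to -2 for unique allocations
    type_desc: *TyDesc,               // drop glue for the contents
    prev: *mut OldHeaderedUnique<T>,  // managed-heap list links
    next: *mut OldHeaderedUnique<T>,
    data: T,                          // the body, behind box_field_body
}

struct NewUnique<T> {
    data: T,                          // ~T now points directly at the value
}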
let box_ty = expr_ty(bcx, expr); let contents_ty = expr_ty(bcx, contents); - let heap = heap_for_unique(bcx, contents_ty); + let heap = heap_exchange; return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap) } ast::ExprLit(lit) => { @@ -1461,8 +1460,7 @@ fn trans_unary_datum<'a>( trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_managed) } ast::UnUniq => { - let heap = heap_for_unique(bcx, un_ty); - trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap) + trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_exchange) } ast::UnDeref => { bcx.sess().bug("deref expressions should have been \ diff --git a/src/librustc/middle/trans/glue.rs b/src/librustc/middle/trans/glue.rs index 4b09da4ddde..457cac09fc5 100644 --- a/src/librustc/middle/trans/glue.rs +++ b/src/librustc/middle/trans/glue.rs @@ -303,11 +303,7 @@ pub fn make_free_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t) with_cond(bcx, not_null, |bcx| { let body_datum = box_datum.box_body(bcx); let bcx = drop_ty(bcx, body_datum.to_ref_llval(bcx), body_datum.ty); - if ty::type_contents(bcx.tcx(), t).owns_managed() { - trans_free(bcx, box_datum.val) - } else { - trans_exchange_free(bcx, box_datum.val) - } + trans_exchange_free(bcx, box_datum.val) }) } ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) | @@ -552,7 +548,6 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info { let has_header = match ty::get(t).sty { ty::ty_box(..) => true, - ty::ty_uniq(..) => ty::type_contents(ccx.tcx, t).owns_managed(), _ => false }; diff --git a/src/librustc/middle/trans/reflect.rs b/src/librustc/middle/trans/reflect.rs index e8d9015e860..2a8c23a6c32 100644 --- a/src/librustc/middle/trans/reflect.rs +++ b/src/librustc/middle/trans/reflect.rs @@ -184,11 +184,7 @@ impl<'a> Reflector<'a> { ty::ty_vec(ref mt, vst) => { let (name, extra) = self.vstore_name_and_extra(t, vst); let extra = extra + self.c_mt(mt); - if "uniq" == name && ty::type_contents(bcx.tcx(), t).owns_managed() { - self.visit("evec_uniq_managed", extra) - } else { - self.visit(~"evec_" + name, extra) - } + self.visit(~"evec_" + name, extra) } // Should remove mt from box and uniq. 
ty::ty_box(typ) => { @@ -203,11 +199,7 @@ impl<'a> Reflector<'a> { ty: typ, mutbl: ast::MutImmutable, }); - if ty::type_contents(bcx.tcx(), t).owns_managed() { - self.visit("uniq_managed", extra) - } else { - self.visit("uniq", extra) - } + self.visit("uniq", extra) } ty::ty_ptr(ref mt) => { let extra = self.c_mt(mt); diff --git a/src/librustc/middle/trans/tvec.rs b/src/librustc/middle/trans/tvec.rs index 9819f34c6f1..100f28af97d 100644 --- a/src/librustc/middle/trans/tvec.rs +++ b/src/librustc/middle/trans/tvec.rs @@ -64,7 +64,14 @@ pub fn get_alloc(bcx: &Block, vptr: ValueRef) -> ValueRef { } pub fn get_bodyptr(bcx: &Block, vptr: ValueRef, t: ty::t) -> ValueRef { - if ty::type_contents(bcx.tcx(), t).owns_managed() { + let vt = vec_types(bcx, t); + + let managed = match ty::get(vt.vec_ty).sty { + ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => true, + _ => false + }; + + if managed { GEPi(bcx, vptr, [0u, abi::box_field_body]) } else { vptr @@ -106,7 +113,6 @@ pub fn alloc_raw<'a>( base::malloc_general_dyn(bcx, vecbodyty, heap, vecsize); Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill])); Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc])); - base::maybe_set_managed_unique_rc(bcx, bx, heap); return rslt(bcx, bx); } } @@ -117,7 +123,7 @@ pub fn alloc_uniq_raw<'a>( fill: ValueRef, alloc: ValueRef) -> Result<'a> { - alloc_raw(bcx, unit_ty, fill, alloc, base::heap_for_unique(bcx, unit_ty)) + alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange) } pub fn alloc_vec<'a>( @@ -350,7 +356,7 @@ pub fn trans_uniq_or_managed_vstore<'a>( } } heap_exchange_closure => fail!("vectors use exchange_alloc"), - heap_managed | heap_managed_unique => {} + heap_managed => {} } let vt = vec_types_from_expr(bcx, vstore_expr); diff --git a/src/librustc/middle/trans/type_.rs b/src/librustc/middle/trans/type_.rs index d4ba4a76f81..b5349ca323b 100644 --- a/src/librustc/middle/trans/type_.rs +++ b/src/librustc/middle/trans/type_.rs @@ -269,10 +269,6 @@ impl Type { Type::smart_ptr(ctx, &Type::opaque()) } - pub fn unique(ctx: &CrateContext, ty: &Type) -> Type { - Type::smart_ptr(ctx, ty) - } - pub fn opaque_cbox_ptr(cx: &CrateContext) -> Type { Type::opaque_box(cx).ptr_to() } @@ -281,7 +277,7 @@ impl Type { let tydesc_ptr = ctx.tydesc_type.ptr_to(); let box_ty = match store { ty::BoxTraitStore => Type::opaque_box(ctx), - ty::UniqTraitStore => Type::unique(ctx, &Type::i8()), + ty::UniqTraitStore => Type::i8(), ty::RegionTraitStore(..) 
=> Type::i8() }; Type::struct_([tydesc_ptr, box_ty.ptr_to()], false) diff --git a/src/librustc/middle/trans/type_of.rs b/src/librustc/middle/trans/type_of.rs index 2fc35f379ba..ae96f43b07d 100644 --- a/src/librustc/middle/trans/type_of.rs +++ b/src/librustc/middle/trans/type_of.rs @@ -245,21 +245,11 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type { Type::smart_ptr(cx, &ty).ptr_to() } ty::ty_uniq(typ) => { - let ty = type_of(cx, typ); - if ty::type_contents(cx.tcx, typ).owns_managed() { - Type::unique(cx, &ty).ptr_to() - } else { - ty.ptr_to() - } + type_of(cx, typ).ptr_to() } ty::ty_vec(ref mt, ty::vstore_uniq) => { let ty = type_of(cx, mt.ty); - let ty = Type::vec(cx.sess.targ_cfg.arch, &ty); - if ty::type_contents(cx.tcx, mt.ty).owns_managed() { - Type::unique(cx, &ty).ptr_to() - } else { - ty.ptr_to() - } + Type::vec(cx.sess.targ_cfg.arch, &ty).ptr_to() } ty::ty_unboxed_vec(ref mt) => { let ty = type_of(cx, mt.ty); diff --git a/src/libstd/rc.rs b/src/libstd/rc.rs index 9947d8822ae..48e796f0f4a 100644 --- a/src/libstd/rc.rs +++ b/src/libstd/rc.rs @@ -230,4 +230,12 @@ mod tests { drop(x); assert!(y.upgrade().is_none()); } + + #[test] + fn gc_inside() { + // see issue #11532 + use gc::Gc; + let a = Rc::new(RefCell::new(Gc::new(1))); + assert!(a.borrow().try_borrow_mut().is_some()); + } } diff --git a/src/libstd/reflect.rs b/src/libstd/reflect.rs index 8a3e60eb3e2..c4a5561a7aa 100644 --- a/src/libstd/reflect.rs +++ b/src/libstd/reflect.rs @@ -227,6 +227,7 @@ impl TyVisitor for MovePtrAdaptor { true } + #[cfg(stage0)] fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<~u8>(); if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; } @@ -275,6 +276,7 @@ impl TyVisitor for MovePtrAdaptor { true } + #[cfg(stage0)] fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<~[@u8]>(); if ! 
self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; } diff --git a/src/libstd/repr.rs b/src/libstd/repr.rs index e0f96365edd..8539717544d 100644 --- a/src/libstd/repr.rs +++ b/src/libstd/repr.rs @@ -310,6 +310,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> { }) } + #[cfg(stage0)] fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool { self.writer.write(['~' as u8]); self.get::<&raw::Box<()>>(|this, b| { @@ -358,6 +359,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> { }) } + #[cfg(stage0)] fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool { self.get::<&raw::Box>>(|this, b| { this.writer.write(['~' as u8]); diff --git a/src/libstd/unstable/intrinsics.rs b/src/libstd/unstable/intrinsics.rs index acd1cfcf901..269622d02c0 100644 --- a/src/libstd/unstable/intrinsics.rs +++ b/src/libstd/unstable/intrinsics.rs @@ -146,6 +146,7 @@ pub trait TyVisitor { fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool; fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool; + #[cfg(stage0)] fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool; fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool; fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool; @@ -154,6 +155,7 @@ pub trait TyVisitor { fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool; fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool; fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool; + #[cfg(stage0)] fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool; fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool; fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint, diff --git a/src/libstd/vec.rs b/src/libstd/vec.rs index 61e78b68adc..df4d82678d3 100644 --- a/src/libstd/vec.rs +++ b/src/libstd/vec.rs @@ -116,14 +116,18 @@ use ptr::to_unsafe_ptr; use ptr; use ptr::RawPtr; use rt::global_heap::{malloc_raw, realloc_raw, exchange_free}; +#[cfg(stage0)] use rt::local_heap::local_free; use mem; use mem::size_of; use uint; use unstable::finally::Finally; use unstable::intrinsics; +#[cfg(stage0)] use unstable::intrinsics::{get_tydesc, owns_managed}; -use unstable::raw::{Box, Repr, Slice, Vec}; +use unstable::raw::{Repr, Slice, Vec}; +#[cfg(stage0)] +use unstable::raw::Box; use util; /** @@ -178,6 +182,7 @@ pub fn from_elem(n_elts: uint, t: T) -> ~[T] { /// Creates a new vector with a capacity of `capacity` #[inline] +#[cfg(stage0)] pub fn with_capacity(capacity: uint) -> ~[T] { unsafe { if owns_managed::() { @@ -198,6 +203,23 @@ pub fn with_capacity(capacity: uint) -> ~[T] { } } +/// Creates a new vector with a capacity of `capacity` +#[inline] +#[cfg(not(stage0))] +pub fn with_capacity(capacity: uint) -> ~[T] { + unsafe { + let alloc = capacity * mem::nonzero_size_of::(); + let size = alloc + mem::size_of::>(); + if alloc / mem::nonzero_size_of::() != capacity || size < alloc { + fail!("vector size is too large: {}", capacity); + } + let ptr = malloc_raw(size) as *mut Vec<()>; + (*ptr).alloc = alloc; + (*ptr).fill = 0; + cast::transmute(ptr) + } +} + /** * Builds a vector by calling a provided function with an argument * function that pushes an element to the back of a vector. 
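// With the owns_managed() branches gone, a `~[T]` is always a single
// exchange-heap allocation holding the unboxed vector repr: the `fill` and
// `alloc` byte counts followed by the elements, with no box header in front.
// A rough sketch of that layout and of the overflow-checked size arithmetic
// used by the new with_capacity/reserve paths; `VecRepr` and
// `checked_vec_alloc_size` are illustrative names, not the libstd ones:
struct VecRepr<T> {
    fill: uint,   // bytes in use: len * nonzero_size_of::<T>()
    alloc: uint,  // bytes allocated: capacity * nonzero_size_of::<T>()
    data: T,      // first element; the rest are stored inline after it
}

fn checked_vec_alloc_size<T>(capacity: uint) -> uint {
    let alloc = capacity * mem::nonzero_size_of::<T>();
    let size = alloc + mem::size_of::<VecRepr<()>>();
    // If the multiplication wrapped, dividing back does not recover
    // `capacity`; if the addition wrapped, `size` ends up below `alloc`.
    if alloc / mem::nonzero_size_of::<T>() != capacity || size < alloc {
        fail!("vector size is too large: {}", capacity);
    }
    size
}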
@@ -1481,6 +1503,7 @@ impl OwnedVector for ~[T] { self.move_iter().invert() } + #[cfg(stage0)] fn reserve(&mut self, n: uint) { // Only make the (slow) call into the runtime if we have to if self.capacity() < n { @@ -1504,6 +1527,24 @@ impl OwnedVector for ~[T] { } } + #[cfg(not(stage0))] + fn reserve(&mut self, n: uint) { + // Only make the (slow) call into the runtime if we have to + if self.capacity() < n { + unsafe { + let ptr: *mut *mut Vec<()> = cast::transmute(self); + let alloc = n * mem::nonzero_size_of::(); + let size = alloc + mem::size_of::>(); + if alloc / mem::nonzero_size_of::() != n || size < alloc { + fail!("vector size is too large: {}", n); + } + *ptr = realloc_raw(*ptr as *mut c_void, size) + as *mut Vec<()>; + (**ptr).alloc = alloc; + } + } + } + #[inline] fn reserve_at_least(&mut self, n: uint) { self.reserve(uint::next_power_of_two_opt(n).unwrap_or(n)); @@ -1520,6 +1561,7 @@ impl OwnedVector for ~[T] { } #[inline] + #[cfg(stage0)] fn capacity(&self) -> uint { unsafe { if owns_managed::() { @@ -1532,6 +1574,15 @@ impl OwnedVector for ~[T] { } } + #[inline] + #[cfg(not(stage0))] + fn capacity(&self) -> uint { + unsafe { + let repr: **Vec<()> = cast::transmute(self); + (**repr).alloc / mem::nonzero_size_of::() + } + } + fn shrink_to_fit(&mut self) { unsafe { let ptr: *mut *mut Vec<()> = cast::transmute(self); @@ -1543,6 +1594,7 @@ impl OwnedVector for ~[T] { } #[inline] + #[cfg(stage0)] fn push(&mut self, t: T) { unsafe { if owns_managed::() { @@ -1583,7 +1635,31 @@ impl OwnedVector for ~[T] { intrinsics::move_val_init(&mut(*p), t); } } + } + #[inline] + #[cfg(not(stage0))] + fn push(&mut self, t: T) { + unsafe { + let repr: **Vec<()> = cast::transmute(&mut *self); + let fill = (**repr).fill; + if (**repr).alloc <= fill { + self.reserve_additional(1); + } + + push_fast(self, t); + } + + // This doesn't bother to make sure we have space. + #[inline] // really pretty please + unsafe fn push_fast(this: &mut ~[T], t: T) { + let repr: **mut Vec = cast::transmute(this); + let fill = (**repr).fill; + (**repr).fill += mem::nonzero_size_of::(); + let p = to_unsafe_ptr(&((**repr).data)); + let p = ptr::offset(p, fill as int) as *mut T; + intrinsics::move_val_init(&mut(*p), t); + } } #[inline] @@ -1746,6 +1822,7 @@ impl OwnedVector for ~[T] { } } #[inline] + #[cfg(stage0)] unsafe fn set_len(&mut self, new_len: uint) { if owns_managed::() { let repr: **mut Box> = cast::transmute(self); @@ -1755,6 +1832,13 @@ impl OwnedVector for ~[T] { (**repr).fill = new_len * mem::nonzero_size_of::(); } } + + #[inline] + #[cfg(not(stage0))] + unsafe fn set_len(&mut self, new_len: uint) { + let repr: **mut Vec<()> = cast::transmute(self); + (**repr).fill = new_len * mem::nonzero_size_of::(); + } } impl Mutable for ~[T] { @@ -2926,6 +3010,7 @@ impl DoubleEndedIterator for MoveIterator { } #[unsafe_destructor] +#[cfg(stage0)] impl Drop for MoveIterator { fn drop(&mut self) { // destroy the remaining elements @@ -2940,6 +3025,18 @@ impl Drop for MoveIterator { } } +#[unsafe_destructor] +#[cfg(not(stage0))] +impl Drop for MoveIterator { + fn drop(&mut self) { + // destroy the remaining elements + for _x in *self {} + unsafe { + exchange_free(self.allocation as *u8 as *c_char) + } + } +} + /// An iterator that moves out of a vector in reverse order. 
pub type MoveRevIterator = Invert>; diff --git a/src/test/debug-info/managed-pointer-within-unique-vec.rs b/src/test/debug-info/managed-pointer-within-unique-vec.rs index 337af1ed86f..8eed28170c5 100644 --- a/src/test/debug-info/managed-pointer-within-unique-vec.rs +++ b/src/test/debug-info/managed-pointer-within-unique-vec.rs @@ -17,16 +17,16 @@ // debugger:run // debugger:finish -// debugger:print unique->val.elements[0]->val +// debugger:print unique->elements[0]->val // check:$1 = 10 -// debugger:print unique->val.elements[1]->val +// debugger:print unique->elements[1]->val // check:$2 = 11 -// debugger:print unique->val.elements[2]->val +// debugger:print unique->elements[2]->val // check:$3 = 12 -// debugger:print unique->val.elements[3]->val +// debugger:print unique->elements[3]->val // check:$4 = 13 #[allow(unused_variable)]; diff --git a/src/test/debug-info/managed-pointer-within-unique.rs b/src/test/debug-info/managed-pointer-within-unique.rs index db75b4bd4be..4afc8b235ed 100644 --- a/src/test/debug-info/managed-pointer-within-unique.rs +++ b/src/test/debug-info/managed-pointer-within-unique.rs @@ -21,28 +21,22 @@ // debugger:print *ordinary_unique // check:$1 = {-1, -2} -// debugger:print managed_within_unique.val->x +// debugger:print managed_within_unique->x // check:$2 = -3 -// debugger:print managed_within_unique.val->y->val +// debugger:print managed_within_unique->y->val // check:$3 = -4 #[allow(unused_variable)]; -struct ContainsManaged -{ - x: int, - y: @int +struct ContainsManaged { + x: int, + y: @int } fn main() { + let ordinary_unique = ~(-1, -2); - let ordinary_unique = ~(-1, -2); - - - // This is a special case: Normally values allocated in the exchange heap are not boxed, unless, - // however, if they contain managed pointers. - // This test case verifies that both cases are handled correctly. let managed_within_unique = ~ContainsManaged { x: -3, y: @-4 }; zzz(); diff --git a/src/test/debug-info/recursive-struct.rs b/src/test/debug-info/recursive-struct.rs index 20c8cc27f27..cf693d7e356 100644 --- a/src/test/debug-info/recursive-struct.rs +++ b/src/test/debug-info/recursive-struct.rs @@ -49,9 +49,9 @@ // debugger:print stack_managed.next.val->val.value // check:$12 = 11 -// debugger:print unique_managed->val.value +// debugger:print unique_managed->value // check:$13 = 12 -// debugger:print unique_managed->val.next.val->val.value +// debugger:print unique_managed->next.val->val.value // check:$14 = 13 // debugger:print box_managed->val.value diff --git a/src/test/run-pass/reflect-visit-data.rs b/src/test/run-pass/reflect-visit-data.rs index 8fc27cf8ea3..e244eace65b 100644 --- a/src/test/run-pass/reflect-visit-data.rs +++ b/src/test/run-pass/reflect-visit-data.rs @@ -223,13 +223,6 @@ impl TyVisitor for ptr_visit_adaptor { true } - fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool { - self.align_to::<~u8>(); - if ! self.inner().visit_uniq_managed(mtbl, inner) { return false; } - self.bump_past::<~u8>(); - true - } - fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<*u8>(); if ! self.inner().visit_ptr(mtbl, inner) { return false; } @@ -275,13 +268,6 @@ impl TyVisitor for ptr_visit_adaptor { true } - fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool { - self.align_to::<~[@u8]>(); - if ! 
self.inner().visit_evec_uniq_managed(mtbl, inner) { return false; } - self.bump_past::<~[@u8]>(); - true - } - fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool { self.align_to::<&'static [u8]>(); if ! self.inner().visit_evec_slice(mtbl, inner) { return false; } @@ -549,7 +535,6 @@ impl TyVisitor for my_visitor { fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } - fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } @@ -557,7 +542,6 @@ impl TyVisitor for my_visitor { fn visit_unboxed_vec(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_evec_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_evec_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } - fn visit_evec_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint, _mtbl: uint, _inner: *TyDesc) -> bool { true } diff --git a/src/test/run-pass/reflect-visit-type.rs b/src/test/run-pass/reflect-visit-type.rs index de50bb3bfed..f5871facd1d 100644 --- a/src/test/run-pass/reflect-visit-type.rs +++ b/src/test/run-pass/reflect-visit-type.rs @@ -70,7 +70,6 @@ impl TyVisitor for MyVisitor { fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } - fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } @@ -83,12 +82,6 @@ impl TyVisitor for MyVisitor { self.types.push(~"]"); true } - fn visit_evec_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool { - self.types.push(~"["); - unsafe { visit_tydesc(inner, &mut *self as &mut TyVisitor) }; - self.types.push(~"]"); - true - } fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true } fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint, _mtbl: uint, _inner: *TyDesc) -> bool { true }