remove reference counting headers from ~
Unique pointers and vectors currently carry a reference-counting header whenever their contents contain a managed pointer. This `{ ref_count, type_desc, prev, next }` header is unnecessary and is not a sensible foundation for tracing: it adds needless complexity to library code, and it has caused breakage wherever the managed-contents branch was left out. With it gone, the `borrow_offset` field can also be removed from `TyDesc`, along with the associated handling in the compiler.

Closes #9510
Closes #11533
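For context, a rough sketch of the two layouts this message describes. Everything below is illustrative: the struct names are made up, and the real definitions lived in the runtime using the era's types (`uint`, `*TyDesc`, and so on). Before this change, a `~T` whose contents owned `@`-pointers was given the same four-word header as an `@T` box; after it, every `~T` points directly at its value.

// Hypothetical stand-ins for illustration; not the actual runtime types.
struct TyDesc; // opaque type descriptor

// Old: a ~T containing managed data shared the @-box layout, so the value
// sat at a nonzero offset behind a { ref_count, type_desc, prev, next } header.
#[repr(C)]
struct HeaderedBox<T> {
    ref_count: usize,
    type_desc: *const TyDesc,
    prev: *mut u8,
    next: *mut u8,
    body: T,
}

// New: every ~T is a bare exchange-heap allocation; the pointer is the
// address of the value itself.
#[repr(C)]
struct BareBox<T> {
    body: T,
}

fn main() {
    // The header costs four words before the body even begins.
    println!("headered: {} bytes, bare: {} bytes",
             std::mem::size_of::<HeaderedBox<u64>>(),
             std::mem::size_of::<BareBox<u64>>());
}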
parent 9075025c7b
commit 0e885e42b1
@@ -1584,14 +1584,9 @@ fn compile_submatch_continue<'r,
     }
 
     if any_uniq_pat(m, col) {
-        let pat_ty = node_id_type(bcx, pat_id);
         let llbox = Load(bcx, val);
-        let unboxed = match ty::get(pat_ty).sty {
-            ty::ty_uniq(..) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
-            _ => GEPi(bcx, llbox, [0u, abi::box_field_body])
-        };
         compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
-                         vec::append(~[unboxed], vals_left), chk);
+                         vec::append(~[llbox], vals_left), chk);
         return;
     }
 
@@ -2231,13 +2226,8 @@ fn bind_irrefutable_pat<'a>(
             }
         }
         ast::PatUniq(inner) => {
-            let pat_ty = node_id_type(bcx, pat.id);
             let llbox = Load(bcx, val);
-            let unboxed = match ty::get(pat_ty).sty {
-                ty::ty_uniq(..) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
-                _ => GEPi(bcx, llbox, [0u, abi::box_field_body])
-            };
-            bcx = bind_irrefutable_pat(bcx, inner, unboxed, binding_mode);
+            bcx = bind_irrefutable_pat(bcx, inner, llbox, binding_mode);
         }
         ast::PatRegion(inner) => {
             let loaded_val = Load(bcx, val);
@@ -358,7 +358,7 @@ pub fn malloc_raw_dyn<'a>(
     } else {
         // we treat ~fn, @fn and @[] as @ here, which isn't ideal
         let langcall = match heap {
-            heap_managed | heap_managed_unique => {
+            heap_managed => {
                 require_alloc_fn(bcx, t, MallocFnLangItem)
             }
             heap_exchange_closure => {
@@ -382,9 +382,7 @@ pub fn malloc_raw_dyn<'a>(
                                langcall,
                                [tydesc, size],
                                None);
-        let r = rslt(r.bcx, PointerCast(r.bcx, r.val, llty));
-        maybe_set_managed_unique_rc(r.bcx, r.val, heap);
-        r
+        rslt(r.bcx, PointerCast(r.bcx, r.val, llty))
     }
 }
 
@@ -431,27 +429,6 @@ pub fn malloc_general<'a>(bcx: &'a Block, t: ty::t, heap: heap)
     malloc_general_dyn(bcx, t, heap, llsize_of(bcx.ccx(), ty))
 }
 
-pub fn heap_for_unique(bcx: &Block, t: ty::t) -> heap {
-    if ty::type_contents(bcx.tcx(), t).owns_managed() {
-        heap_managed_unique
-    } else {
-        heap_exchange
-    }
-}
-
-pub fn maybe_set_managed_unique_rc(bcx: &Block, bx: ValueRef, heap: heap) {
-    assert!(heap != heap_exchange);
-    if heap == heap_managed_unique {
-        // In cases where we are looking at a unique-typed allocation in the
-        // managed heap (thus have refcount 1 from the managed allocator),
-        // such as a ~(@foo) or such. These need to have their refcount forced
-        // to -2 so the annihilator ignores them.
-        let rc = GEPi(bcx, bx, [0u, abi::box_field_refcnt]);
-        let rc_val = C_int(bcx.ccx(), -2);
-        Store(bcx, rc_val, rc);
-    }
-}
-
 // Type descriptor and type glue stuff
 
 pub fn get_tydesc_simple(ccx: &CrateContext, t: ty::t) -> ValueRef {
@@ -150,14 +150,6 @@ pub fn mk_closure_tys(tcx: ty::ctxt,
     return cdata_ty;
 }
 
-fn heap_for_unique_closure(bcx: &Block, t: ty::t) -> heap {
-    if ty::type_contents(bcx.tcx(), t).owns_managed() {
-        heap_managed_unique
-    } else {
-        heap_exchange_closure
-    }
-}
-
 pub fn allocate_cbox<'a>(
                      bcx: &'a Block<'a>,
                      sigil: ast::Sigil,
@@ -173,7 +165,7 @@ pub fn allocate_cbox<'a>(
             tcx.sess.bug("trying to trans allocation of @fn")
         }
         ast::OwnedSigil => {
-            malloc_raw(bcx, cdata_ty, heap_for_unique_closure(bcx, cdata_ty))
+            malloc_raw(bcx, cdata_ty, heap_exchange_closure)
         }
         ast::BorrowedSigil => {
             let cbox_ty = tuplify_box_ty(tcx, cdata_ty);
@@ -316,7 +316,6 @@ pub fn warn_not_to_commit(ccx: &CrateContext, msg: &str) {
 #[deriving(Eq)]
 pub enum heap {
     heap_managed,
-    heap_managed_unique,
     heap_exchange,
     heap_exchange_closure
 }
@@ -498,7 +497,7 @@ pub fn add_clean_temp_mem_in_scope_(bcx: &Block, scope_id: Option<ast::NodeId>,
 
 pub fn add_clean_free(cx: &Block, ptr: ValueRef, heap: heap) {
     let free_fn = match heap {
-        heap_managed | heap_managed_unique => {
+        heap_managed => {
             @GCHeapFreeingCleanupFunction {
                 ptr: ptr,
             } as @CleanupFunction
@@ -570,11 +570,6 @@ impl Datum {
         let (content_ty, header) = match ty::get(self.ty).sty {
             ty::ty_box(typ) => (typ, true),
             ty::ty_uniq(typ) => (typ, false),
-            ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) => {
-                let unit_ty = ty::sequence_element_type(bcx.tcx(), self.ty);
-                let unboxed_vec_ty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty);
-                (unboxed_vec_ty, true)
-            }
             _ => {
                 bcx.tcx().sess.bug(format!(
                     "box_body() invoked on non-box type {}",
@@ -582,7 +577,7 @@ impl Datum {
             }
         };
 
-        if !header && !ty::type_contents(bcx.tcx(), content_ty).owns_managed() {
+        if !header {
             let ptr = self.to_value_llval(bcx);
             let ty = type_of::type_of(bcx.ccx(), content_ty);
             let body = PointerCast(bcx, ptr, ty.ptr_to());
@@ -2147,10 +2147,6 @@ fn type_metadata(cx: &CrateContext,
                 ty::vstore_fixed(len) => {
                     fixed_vec_metadata(cx, mt.ty, len, usage_site_span)
                 }
-                ty::vstore_uniq if ty::type_contents(cx.tcx, mt.ty).owns_managed() => {
-                    let boxed_vec_metadata = boxed_vec_metadata(cx, mt.ty, usage_site_span);
-                    pointer_type_metadata(cx, t, boxed_vec_metadata)
-                }
                 ty::vstore_uniq => {
                     let vec_metadata = vec_metadata(cx, mt.ty, usage_site_span);
                     pointer_type_metadata(cx, t, vec_metadata)
@@ -2165,12 +2161,8 @@ fn type_metadata(cx: &CrateContext,
             }
         },
         ty::ty_uniq(typ) => {
-            if ty::type_contents(cx.tcx, typ).owns_managed() {
-                create_pointer_to_box_metadata(cx, t, typ)
-            } else {
-                let pointee = type_metadata(cx, typ, usage_site_span);
-                pointer_type_metadata(cx, t, pointee)
-            }
+            let pointee = type_metadata(cx, typ, usage_site_span);
+            pointer_type_metadata(cx, t, pointee)
         }
         ty::ty_ptr(ref mt) | ty::ty_rptr(_, ref mt) => {
             let pointee = type_metadata(cx, mt.ty, usage_site_span);
@@ -608,8 +608,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr)
                                                       expr, contents);
         }
         ast::ExprVstore(contents, ast::ExprVstoreUniq) => {
-            let heap = heap_for_unique(bcx, expr_ty(bcx, contents));
-            return tvec::trans_uniq_or_managed_vstore(bcx, heap,
+            return tvec::trans_uniq_or_managed_vstore(bcx, heap_exchange,
                                                       expr, contents);
         }
         ast::ExprBox(_, contents) => {
@@ -617,7 +616,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr)
             // `trans_rvalue_dps_unadjusted`.)
             let box_ty = expr_ty(bcx, expr);
             let contents_ty = expr_ty(bcx, contents);
-            let heap = heap_for_unique(bcx, contents_ty);
+            let heap = heap_exchange;
             return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap)
         }
         ast::ExprLit(lit) => {
@@ -1461,8 +1460,7 @@ fn trans_unary_datum<'a>(
             trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_managed)
         }
         ast::UnUniq => {
-            let heap = heap_for_unique(bcx, un_ty);
-            trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap)
+            trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_exchange)
         }
         ast::UnDeref => {
             bcx.sess().bug("deref expressions should have been \
@@ -303,11 +303,7 @@ pub fn make_free_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
             with_cond(bcx, not_null, |bcx| {
                 let body_datum = box_datum.box_body(bcx);
                 let bcx = drop_ty(bcx, body_datum.to_ref_llval(bcx), body_datum.ty);
-                if ty::type_contents(bcx.tcx(), t).owns_managed() {
-                    trans_free(bcx, box_datum.val)
-                } else {
-                    trans_exchange_free(bcx, box_datum.val)
-                }
+                trans_exchange_free(bcx, box_datum.val)
             })
         }
         ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) |
@@ -552,7 +548,6 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info {
 
     let has_header = match ty::get(t).sty {
         ty::ty_box(..) => true,
-        ty::ty_uniq(..) => ty::type_contents(ccx.tcx, t).owns_managed(),
         _ => false
     };
 
@@ -184,11 +184,7 @@ impl<'a> Reflector<'a> {
             ty::ty_vec(ref mt, vst) => {
                 let (name, extra) = self.vstore_name_and_extra(t, vst);
                 let extra = extra + self.c_mt(mt);
-                if "uniq" == name && ty::type_contents(bcx.tcx(), t).owns_managed() {
-                    self.visit("evec_uniq_managed", extra)
-                } else {
-                    self.visit(~"evec_" + name, extra)
-                }
+                self.visit(~"evec_" + name, extra)
             }
             // Should remove mt from box and uniq.
             ty::ty_box(typ) => {
@@ -203,11 +199,7 @@ impl<'a> Reflector<'a> {
                     ty: typ,
                     mutbl: ast::MutImmutable,
                 });
-                if ty::type_contents(bcx.tcx(), t).owns_managed() {
-                    self.visit("uniq_managed", extra)
-                } else {
-                    self.visit("uniq", extra)
-                }
+                self.visit("uniq", extra)
             }
             ty::ty_ptr(ref mt) => {
                 let extra = self.c_mt(mt);
@@ -64,7 +64,14 @@ pub fn get_alloc(bcx: &Block, vptr: ValueRef) -> ValueRef {
 }
 
 pub fn get_bodyptr(bcx: &Block, vptr: ValueRef, t: ty::t) -> ValueRef {
-    if ty::type_contents(bcx.tcx(), t).owns_managed() {
+    let vt = vec_types(bcx, t);
+
+    let managed = match ty::get(vt.vec_ty).sty {
+        ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => true,
+        _ => false
+    };
+
+    if managed {
         GEPi(bcx, vptr, [0u, abi::box_field_body])
     } else {
         vptr
@@ -106,7 +113,6 @@ pub fn alloc_raw<'a>(
         base::malloc_general_dyn(bcx, vecbodyty, heap, vecsize);
     Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill]));
     Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc]));
-    base::maybe_set_managed_unique_rc(bcx, bx, heap);
     return rslt(bcx, bx);
     }
 }
@@ -117,7 +123,7 @@ pub fn alloc_uniq_raw<'a>(
                          fill: ValueRef,
                          alloc: ValueRef)
                          -> Result<'a> {
-    alloc_raw(bcx, unit_ty, fill, alloc, base::heap_for_unique(bcx, unit_ty))
+    alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange)
 }
 
 pub fn alloc_vec<'a>(
@@ -350,7 +356,7 @@ pub fn trans_uniq_or_managed_vstore<'a>(
             }
         }
         heap_exchange_closure => fail!("vectors use exchange_alloc"),
-        heap_managed | heap_managed_unique => {}
+        heap_managed => {}
     }
 
     let vt = vec_types_from_expr(bcx, vstore_expr);
@@ -269,10 +269,6 @@ impl Type {
         Type::smart_ptr(ctx, &Type::opaque())
     }
 
-    pub fn unique(ctx: &CrateContext, ty: &Type) -> Type {
-        Type::smart_ptr(ctx, ty)
-    }
-
     pub fn opaque_cbox_ptr(cx: &CrateContext) -> Type {
         Type::opaque_box(cx).ptr_to()
     }
@@ -281,7 +277,7 @@ impl Type {
         let tydesc_ptr = ctx.tydesc_type.ptr_to();
         let box_ty = match store {
             ty::BoxTraitStore => Type::opaque_box(ctx),
-            ty::UniqTraitStore => Type::unique(ctx, &Type::i8()),
+            ty::UniqTraitStore => Type::i8(),
             ty::RegionTraitStore(..) => Type::i8()
         };
         Type::struct_([tydesc_ptr, box_ty.ptr_to()], false)
@@ -245,21 +245,11 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
           Type::smart_ptr(cx, &ty).ptr_to()
       }
       ty::ty_uniq(typ) => {
-          let ty = type_of(cx, typ);
-          if ty::type_contents(cx.tcx, typ).owns_managed() {
-              Type::unique(cx, &ty).ptr_to()
-          } else {
-              ty.ptr_to()
-          }
+          type_of(cx, typ).ptr_to()
      }
      ty::ty_vec(ref mt, ty::vstore_uniq) => {
          let ty = type_of(cx, mt.ty);
-          let ty = Type::vec(cx.sess.targ_cfg.arch, &ty);
-          if ty::type_contents(cx.tcx, mt.ty).owns_managed() {
-              Type::unique(cx, &ty).ptr_to()
-          } else {
-              ty.ptr_to()
-          }
+          Type::vec(cx.sess.targ_cfg.arch, &ty).ptr_to()
      }
      ty::ty_unboxed_vec(ref mt) => {
          let ty = type_of(cx, mt.ty);
@@ -230,4 +230,12 @@ mod tests {
         drop(x);
         assert!(y.upgrade().is_none());
     }
+
+    #[test]
+    fn gc_inside() {
+        // see issue #11532
+        use gc::Gc;
+        let a = Rc::new(RefCell::new(Gc::new(1)));
+        assert!(a.borrow().try_borrow_mut().is_some());
+    }
 }
@@ -227,6 +227,7 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<~u8>();
         if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
@@ -275,6 +276,7 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<~[@u8]>();
         if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
@@ -310,6 +310,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
         self.writer.write(['~' as u8]);
         self.get::<&raw::Box<()>>(|this, b| {
@@ -358,6 +359,7 @@ impl<'a> TyVisitor for ReprVisitor<'a> {
         })
     }
 
+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.get::<&raw::Box<raw::Vec<()>>>(|this, b| {
             this.writer.write(['~' as u8]);
@@ -146,6 +146,7 @@ pub trait TyVisitor {
 
     fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
+    #[cfg(stage0)]
     fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
@@ -154,6 +155,7 @@ pub trait TyVisitor {
     fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
+    #[cfg(stage0)]
     fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,
@@ -116,14 +116,18 @@ use ptr::to_unsafe_ptr;
 use ptr;
 use ptr::RawPtr;
 use rt::global_heap::{malloc_raw, realloc_raw, exchange_free};
+#[cfg(stage0)]
 use rt::local_heap::local_free;
 use mem;
 use mem::size_of;
 use uint;
 use unstable::finally::Finally;
 use unstable::intrinsics;
+#[cfg(stage0)]
 use unstable::intrinsics::{get_tydesc, owns_managed};
-use unstable::raw::{Box, Repr, Slice, Vec};
+use unstable::raw::{Repr, Slice, Vec};
+#[cfg(stage0)]
+use unstable::raw::Box;
 use util;
 
 /**
@@ -178,6 +182,7 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> ~[T] {
 
 /// Creates a new vector with a capacity of `capacity`
 #[inline]
+#[cfg(stage0)]
 pub fn with_capacity<T>(capacity: uint) -> ~[T] {
     unsafe {
         if owns_managed::<T>() {
@@ -198,6 +203,23 @@ pub fn with_capacity<T>(capacity: uint) -> ~[T] {
     }
 }
 
+/// Creates a new vector with a capacity of `capacity`
+#[inline]
+#[cfg(not(stage0))]
+pub fn with_capacity<T>(capacity: uint) -> ~[T] {
+    unsafe {
+        let alloc = capacity * mem::nonzero_size_of::<T>();
+        let size = alloc + mem::size_of::<Vec<()>>();
+        if alloc / mem::nonzero_size_of::<T>() != capacity || size < alloc {
+            fail!("vector size is too large: {}", capacity);
+        }
+        let ptr = malloc_raw(size) as *mut Vec<()>;
+        (*ptr).alloc = alloc;
+        (*ptr).fill = 0;
+        cast::transmute(ptr)
+    }
+}
+
 /**
  * Builds a vector by calling a provided function with an argument
  * function that pushes an element to the back of a vector.
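The overflow guard in the new `with_capacity` (and in `reserve` below) is easy to misread: it multiplies first, then divides back to detect wraparound, and separately checks that adding the vector header did not wrap. Here is a small self-contained sketch of the same arithmetic in modern Rust; `HEADER` is a made-up constant standing in for `mem::size_of::<Vec<()>>()`, and the function name is illustrative.

// Illustrative sketch of the overflow checks above, not runtime code.
const HEADER: usize = 16; // stand-in for the fill/alloc vector header

fn checked_vec_size(capacity: usize, elem_size: usize) -> Option<usize> {
    assert!(elem_size > 0); // nonzero_size_of never returns 0
    let alloc = capacity.wrapping_mul(elem_size);
    let size = alloc.wrapping_add(HEADER);
    // Dividing back detects multiplication wraparound; the comparison
    // detects wraparound from adding the header -- the same two checks
    // as the diff above.
    if alloc / elem_size != capacity || size < alloc {
        None
    } else {
        Some(size)
    }
}

fn main() {
    assert_eq!(checked_vec_size(4, 8), Some(4 * 8 + HEADER));
    // This many 8-byte elements wraps the multiplication, so it is rejected:
    assert_eq!(checked_vec_size(usize::MAX / 8 + 1, 8), None);
}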
@@ -1481,6 +1503,7 @@ impl<T> OwnedVector<T> for ~[T] {
         self.move_iter().invert()
     }
 
+    #[cfg(stage0)]
     fn reserve(&mut self, n: uint) {
         // Only make the (slow) call into the runtime if we have to
         if self.capacity() < n {
@@ -1504,6 +1527,24 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }
 
+    #[cfg(not(stage0))]
+    fn reserve(&mut self, n: uint) {
+        // Only make the (slow) call into the runtime if we have to
+        if self.capacity() < n {
+            unsafe {
+                let ptr: *mut *mut Vec<()> = cast::transmute(self);
+                let alloc = n * mem::nonzero_size_of::<T>();
+                let size = alloc + mem::size_of::<Vec<()>>();
+                if alloc / mem::nonzero_size_of::<T>() != n || size < alloc {
+                    fail!("vector size is too large: {}", n);
+                }
+                *ptr = realloc_raw(*ptr as *mut c_void, size)
+                       as *mut Vec<()>;
+                (**ptr).alloc = alloc;
+            }
+        }
+    }
+
     #[inline]
     fn reserve_at_least(&mut self, n: uint) {
         self.reserve(uint::next_power_of_two_opt(n).unwrap_or(n));
@@ -1520,6 +1561,7 @@ impl<T> OwnedVector<T> for ~[T] {
     }
 
     #[inline]
+    #[cfg(stage0)]
     fn capacity(&self) -> uint {
         unsafe {
             if owns_managed::<T>() {
@@ -1532,6 +1574,15 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }
 
+    #[inline]
+    #[cfg(not(stage0))]
+    fn capacity(&self) -> uint {
+        unsafe {
+            let repr: **Vec<()> = cast::transmute(self);
+            (**repr).alloc / mem::nonzero_size_of::<T>()
+        }
+    }
+
     fn shrink_to_fit(&mut self) {
         unsafe {
             let ptr: *mut *mut Vec<()> = cast::transmute(self);
@@ -1543,6 +1594,7 @@ impl<T> OwnedVector<T> for ~[T] {
     }
 
     #[inline]
+    #[cfg(stage0)]
     fn push(&mut self, t: T) {
         unsafe {
             if owns_managed::<T>() {
@@ -1583,7 +1635,31 @@ impl<T> OwnedVector<T> for ~[T] {
                 intrinsics::move_val_init(&mut(*p), t);
             }
         }
     }
 
+    #[inline]
+    #[cfg(not(stage0))]
+    fn push(&mut self, t: T) {
+        unsafe {
+            let repr: **Vec<()> = cast::transmute(&mut *self);
+            let fill = (**repr).fill;
+            if (**repr).alloc <= fill {
+                self.reserve_additional(1);
+            }
+
+            push_fast(self, t);
+        }
+
+        // This doesn't bother to make sure we have space.
+        #[inline] // really pretty please
+        unsafe fn push_fast<T>(this: &mut ~[T], t: T) {
+            let repr: **mut Vec<u8> = cast::transmute(this);
+            let fill = (**repr).fill;
+            (**repr).fill += mem::nonzero_size_of::<T>();
+            let p = to_unsafe_ptr(&((**repr).data));
+            let p = ptr::offset(p, fill as int) as *mut T;
+            intrinsics::move_val_init(&mut(*p), t);
+        }
+    }
 
     #[inline]
@@ -1746,6 +1822,7 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }
     #[inline]
+    #[cfg(stage0)]
     unsafe fn set_len(&mut self, new_len: uint) {
         if owns_managed::<T>() {
             let repr: **mut Box<Vec<()>> = cast::transmute(self);
@@ -1755,6 +1832,13 @@ impl<T> OwnedVector<T> for ~[T] {
             (**repr).fill = new_len * mem::nonzero_size_of::<T>();
         }
     }
+
+    #[inline]
+    #[cfg(not(stage0))]
+    unsafe fn set_len(&mut self, new_len: uint) {
+        let repr: **mut Vec<()> = cast::transmute(self);
+        (**repr).fill = new_len * mem::nonzero_size_of::<T>();
+    }
 }
 
 impl<T> Mutable for ~[T] {
@@ -2926,6 +3010,7 @@ impl<T> DoubleEndedIterator<T> for MoveIterator<T> {
 }
 
 #[unsafe_destructor]
+#[cfg(stage0)]
 impl<T> Drop for MoveIterator<T> {
     fn drop(&mut self) {
         // destroy the remaining elements
@@ -2940,6 +3025,18 @@ impl<T> Drop for MoveIterator<T> {
         }
     }
 }
 
+#[unsafe_destructor]
+#[cfg(not(stage0))]
+impl<T> Drop for MoveIterator<T> {
+    fn drop(&mut self) {
+        // destroy the remaining elements
+        for _x in *self {}
+        unsafe {
+            exchange_free(self.allocation as *u8 as *c_char)
+        }
+    }
+}
+
 /// An iterator that moves out of a vector in reverse order.
 pub type MoveRevIterator<T> = Invert<MoveIterator<T>>;
 
@@ -17,16 +17,16 @@
 // debugger:run
 // debugger:finish
 
-// debugger:print unique->val.elements[0]->val
+// debugger:print unique->elements[0]->val
 // check:$1 = 10
 
-// debugger:print unique->val.elements[1]->val
+// debugger:print unique->elements[1]->val
 // check:$2 = 11
 
-// debugger:print unique->val.elements[2]->val
+// debugger:print unique->elements[2]->val
 // check:$3 = 12
 
-// debugger:print unique->val.elements[3]->val
+// debugger:print unique->elements[3]->val
 // check:$4 = 13
 
 #[allow(unused_variable)];
@@ -21,28 +21,22 @@
 // debugger:print *ordinary_unique
 // check:$1 = {-1, -2}
 
-// debugger:print managed_within_unique.val->x
+// debugger:print managed_within_unique->x
 // check:$2 = -3
 
-// debugger:print managed_within_unique.val->y->val
+// debugger:print managed_within_unique->y->val
 // check:$3 = -4
 
 #[allow(unused_variable)];
 
-struct ContainsManaged
-{
-    x: int,
-    y: @int
+struct ContainsManaged {
+    x: int,
+    y: @int
 }
 
 fn main() {
-    let ordinary_unique = ~(-1, -2);
-
+    let ordinary_unique = ~(-1, -2);
 
     // This is a special case: Normally values allocated in the exchange heap are not boxed, unless,
     // however, if they contain managed pointers.
     // This test case verifies that both cases are handled correctly.
     let managed_within_unique = ~ContainsManaged { x: -3, y: @-4 };
 
     zzz();
|
||||
// debugger:print stack_managed.next.val->val.value
|
||||
// check:$12 = 11
|
||||
|
||||
// debugger:print unique_managed->val.value
|
||||
// debugger:print unique_managed->value
|
||||
// check:$13 = 12
|
||||
// debugger:print unique_managed->val.next.val->val.value
|
||||
// debugger:print unique_managed->next.val->val.value
|
||||
// check:$14 = 13
|
||||
|
||||
// debugger:print box_managed->val.value
|
||||
|
@@ -223,13 +223,6 @@ impl<V:TyVisitor + movable_ptr> TyVisitor for ptr_visit_adaptor<V> {
         true
     }
 
-    fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~u8>();
-        if ! self.inner().visit_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~u8>();
-        true
-    }
-
     fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<*u8>();
         if ! self.inner().visit_ptr(mtbl, inner) { return false; }
@@ -275,13 +268,6 @@ impl<V:TyVisitor + movable_ptr> TyVisitor for ptr_visit_adaptor<V> {
         true
     }
 
-    fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
-        self.align_to::<~[@u8]>();
-        if ! self.inner().visit_evec_uniq_managed(mtbl, inner) { return false; }
-        self.bump_past::<~[@u8]>();
-        true
-    }
-
     fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<&'static [u8]>();
         if ! self.inner().visit_evec_slice(mtbl, inner) { return false; }
@@ -549,7 +535,6 @@ impl TyVisitor for my_visitor {
 
     fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
-    fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
 
@@ -557,7 +542,6 @@ impl TyVisitor for my_visitor {
     fn visit_unboxed_vec(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
-    fn visit_evec_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint,
                         _mtbl: uint, _inner: *TyDesc) -> bool { true }
@@ -70,7 +70,6 @@ impl TyVisitor for MyVisitor {
 
     fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
-    fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
    fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
    fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
 
@@ -83,12 +82,6 @@ impl TyVisitor for MyVisitor {
         self.types.push(~"]");
         true
     }
-    fn visit_evec_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
-        self.types.push(~"[");
-        unsafe { visit_tydesc(inner, &mut *self as &mut TyVisitor) };
-        self.types.push(~"]");
-        true
-    }
     fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint,
                         _mtbl: uint, _inner: *TyDesc) -> bool { true }