auto merge of #7816 : thestinger/rust/header, r=huonw
Note that the headers are still on `~[T]` when `T` is managed. This is continued from #7605, which removed all the code relying on the headers and removed them from `~T` for non-managed `T`.
Commit: 274e7a4e49
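For context, here is a minimal C sketch of the two layouts this commit distinguishes. It is an illustration, not part of the change: the `fill`/`alloc`/`data` names come from the runtime's `rust_vec` (the Rust-side `UnboxedVecRepr`) touched below, while the exact `rust_opaque_box` header fields are an assumption.

#include <stdint.h>
#include <stdio.h>

/* Headerless unique vector: after this change, ~[T] for non-managed T
   points directly at this body (runtime rust_vec / Rust UnboxedVecRepr). */
typedef struct {
    uintptr_t fill;   /* bytes in use */
    uintptr_t alloc;  /* bytes allocated */
    uint8_t   data[]; /* element storage (flexible array member) */
} rust_vec;

/* Box header that still precedes the body when T is managed; the field
   set is an assumption modeled on rust_opaque_box, for illustration. */
typedef struct rust_opaque_box {
    uintptr_t ref_count;          /* managed boxes are refcounted */
    void *td;                     /* type descriptor */
    struct rust_opaque_box *prev; /* links in the managed-box list */
    struct rust_opaque_box *next;
} rust_opaque_box;

int main(void) {
    /* get_bodyptr() in trans/tvec.rs below makes the same distinction:
       skip the box header only when the element type contains managed data. */
    printf("box header overhead (managed case): %zu bytes\n",
           sizeof(rust_opaque_box));
    printf("vector body metadata: %zu bytes\n", sizeof(rust_vec));
    return 0;
}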
@@ -63,34 +63,33 @@ pub enum LangItem {
     FailFnLangItem,                  // 24
     FailBoundsCheckFnLangItem,       // 25
     ExchangeMallocFnLangItem,        // 26
-    VectorExchangeMallocFnLangItem,  // 27
-    ClosureExchangeMallocFnLangItem, // 28
-    ExchangeFreeFnLangItem,          // 29
-    MallocFnLangItem,                // 30
-    FreeFnLangItem,                  // 31
-    BorrowAsImmFnLangItem,           // 32
-    BorrowAsMutFnLangItem,           // 33
-    ReturnToMutFnLangItem,           // 34
-    CheckNotBorrowedFnLangItem,      // 35
-    StrDupUniqFnLangItem,            // 36
-    RecordBorrowFnLangItem,          // 37
-    UnrecordBorrowFnLangItem,        // 38
+    ClosureExchangeMallocFnLangItem, // 27
+    ExchangeFreeFnLangItem,          // 28
+    MallocFnLangItem,                // 29
+    FreeFnLangItem,                  // 30
+    BorrowAsImmFnLangItem,           // 31
+    BorrowAsMutFnLangItem,           // 32
+    ReturnToMutFnLangItem,           // 33
+    CheckNotBorrowedFnLangItem,      // 34
+    StrDupUniqFnLangItem,            // 35
+    RecordBorrowFnLangItem,          // 36
+    UnrecordBorrowFnLangItem,        // 37

-    StartFnLangItem,                 // 39
+    StartFnLangItem,                 // 38

-    TyDescStructLangItem,            // 40
-    TyVisitorTraitLangItem,          // 41
-    OpaqueStructLangItem,            // 42
+    TyDescStructLangItem,            // 39
+    TyVisitorTraitLangItem,          // 40
+    OpaqueStructLangItem,            // 41
 }

 pub struct LanguageItems {
-    items: [Option<def_id>, ..43]
+    items: [Option<def_id>, ..42]
 }

 impl LanguageItems {
     pub fn new() -> LanguageItems {
         LanguageItems {
-            items: [ None, ..43 ]
+            items: [ None, ..42 ]
         }
     }
@@ -130,24 +129,23 @@ impl LanguageItems {
             24 => "fail_",
             25 => "fail_bounds_check",
             26 => "exchange_malloc",
-            27 => "vector_exchange_malloc",
-            28 => "closure_exchange_malloc",
-            29 => "exchange_free",
-            30 => "malloc",
-            31 => "free",
-            32 => "borrow_as_imm",
-            33 => "borrow_as_mut",
-            34 => "return_to_mut",
-            35 => "check_not_borrowed",
-            36 => "strdup_uniq",
-            37 => "record_borrow",
-            38 => "unrecord_borrow",
+            27 => "closure_exchange_malloc",
+            28 => "exchange_free",
+            29 => "malloc",
+            30 => "free",
+            31 => "borrow_as_imm",
+            32 => "borrow_as_mut",
+            33 => "return_to_mut",
+            34 => "check_not_borrowed",
+            35 => "strdup_uniq",
+            36 => "record_borrow",
+            37 => "unrecord_borrow",

-            39 => "start",
+            38 => "start",

-            40 => "ty_desc",
-            41 => "ty_visitor",
-            42 => "opaque",
+            39 => "ty_desc",
+            40 => "ty_visitor",
+            41 => "opaque",

             _ => "???"
         }
@@ -240,9 +238,6 @@ impl LanguageItems {
     pub fn exchange_malloc_fn(&self) -> def_id {
         self.items[ExchangeMallocFnLangItem as uint].get()
     }
-    pub fn vector_exchange_malloc_fn(&self) -> def_id {
-        self.items[VectorExchangeMallocFnLangItem as uint].get()
-    }
     pub fn closure_exchange_malloc_fn(&self) -> def_id {
         self.items[ClosureExchangeMallocFnLangItem as uint].get()
     }
@@ -336,7 +331,6 @@ impl<'self> LanguageItemCollector<'self> {
         item_refs.insert(@"fail_bounds_check",
                          FailBoundsCheckFnLangItem as uint);
         item_refs.insert(@"exchange_malloc", ExchangeMallocFnLangItem as uint);
-        item_refs.insert(@"vector_exchange_malloc", VectorExchangeMallocFnLangItem as uint);
         item_refs.insert(@"closure_exchange_malloc", ClosureExchangeMallocFnLangItem as uint);
         item_refs.insert(@"exchange_free", ExchangeFreeFnLangItem as uint);
         item_refs.insert(@"malloc", MallocFnLangItem as uint);
@@ -294,25 +294,6 @@ pub fn malloc_raw_dyn(bcx: block,
             [size],
             None);
         rslt(r.bcx, PointerCast(r.bcx, r.val, llty_value.ptr_to()))
-    } else if heap == heap_exchange_vector {
-        // Grab the TypeRef type of box_ptr_ty.
-        let element_type = match ty::get(t).sty {
-            ty::ty_unboxed_vec(e) => e,
-            _ => fail!("not a vector body")
-        };
-        let box_ptr_ty = ty::mk_evec(bcx.tcx(), element_type, ty::vstore_uniq);
-        let llty = type_of(ccx, box_ptr_ty);
-
-        let llty_value = type_of::type_of(ccx, t);
-        let llalign = llalign_of_min(ccx, llty_value);
-
-        // Allocate space:
-        let r = callee::trans_lang_call(
-            bcx,
-            bcx.tcx().lang_items.vector_exchange_malloc_fn(),
-            [C_i32(llalign as i32), size],
-            None);
-        rslt(r.bcx, PointerCast(r.bcx, r.val, llty))
     } else {
         // we treat ~fn, @fn and @[] as @ here, which isn't ideal
         let (mk_fn, langcall) = match heap {
@@ -322,7 +303,7 @@ pub fn malloc_raw_dyn(bcx: block,
             heap_exchange_closure => {
                 (ty::mk_imm_box, bcx.tcx().lang_items.closure_exchange_malloc_fn())
             }
-            _ => fail!("heap_exchange/heap_exchange_vector already handled")
+            _ => fail!("heap_exchange already handled")
         };

         // Grab the TypeRef type of box_ptr_ty.
@@ -283,7 +283,6 @@ pub enum heap {
     heap_managed,
     heap_managed_unique,
     heap_exchange,
-    heap_exchange_vector,
     heap_exchange_closure
 }

@@ -405,7 +404,7 @@ pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
             let f: @fn(block) -> block = |a| glue::trans_free(a, ptr);
             f
         }
-        heap_exchange | heap_exchange_vector | heap_exchange_closure => {
+        heap_exchange | heap_exchange_closure => {
             let f: @fn(block) -> block = |a| glue::trans_exchange_free(a, ptr);
             f
         }
@@ -460,7 +460,7 @@ fn trans_rvalue_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
                                                       expr, contents);
         }
         ast::expr_vstore(contents, ast::expr_vstore_uniq) => {
-            let heap = tvec::heap_for_unique_vector(bcx, expr_ty(bcx, contents));
+            let heap = heap_for_unique(bcx, expr_ty(bcx, contents));
             return tvec::trans_uniq_or_managed_vstore(bcx, heap,
                                                       expr, contents);
         }
@@ -397,9 +397,7 @@ pub fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) -> block {
       ty::ty_uniq(*) => {
         uniq::make_free_glue(bcx, v, t)
       }
-      ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) => {
-        tvec::make_uniq_free_glue(bcx, v, t)
-      }
+      ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) |
       ty::ty_evec(_, ty::vstore_box) | ty::ty_estr(ty::vstore_box) => {
         make_free_glue(bcx, v, tvec::expand_boxed_vec_ty(bcx.tcx(), t))
       }
@@ -122,9 +122,9 @@ impl Reflector {
                        bracket_name: &str,
                        extra: &[ValueRef],
                        inner: &fn(&mut Reflector)) {
-        self.visit(~"enter_" + bracket_name, extra);
+        self.visit("enter_" + bracket_name, extra);
         inner(self);
-        self.visit(~"leave_" + bracket_name, extra);
+        self.visit("leave_" + bracket_name, extra);
     }

     pub fn vstore_name_and_extra(&mut self,
@@ -183,7 +183,11 @@ impl Reflector {
           ty::ty_evec(ref mt, vst) => {
               let (name, extra) = self.vstore_name_and_extra(t, vst);
               let extra = extra + self.c_mt(mt);
-              self.visit(~"evec_" + name, extra)
+              if "uniq" == name && ty::type_contents(bcx.tcx(), t).contains_managed() {
+                  self.visit("evec_uniq_managed", extra)
+              } else {
+                  self.visit(~"evec_" + name, extra)
+              }
           }
           ty::ty_box(ref mt) => {
               let extra = self.c_mt(mt);
@@ -33,23 +33,6 @@ use std::option::None;
 use syntax::ast;
 use syntax::codemap;

-pub fn make_uniq_free_glue(bcx: block, vptrptr: ValueRef, box_ty: ty::t)
-    -> block {
-    let box_datum = immediate_rvalue(Load(bcx, vptrptr), box_ty);
-
-    let not_null = IsNotNull(bcx, box_datum.val);
-    do with_cond(bcx, not_null) |bcx| {
-        let body_datum = box_datum.box_body(bcx);
-        let bcx = glue::drop_ty(bcx, body_datum.to_ref_llval(bcx),
-                                body_datum.ty);
-        if ty::type_contents(bcx.tcx(), box_ty).contains_managed() {
-            glue::trans_free(bcx, box_datum.val)
-        } else {
-            glue::trans_exchange_free(bcx, box_datum.val)
-        }
-    }
-}
-
 // Boxed vector types are in some sense currently a "shorthand" for a box
 // containing an unboxed vector. This expands a boxed vector type into such an
 // expanded type. It doesn't respect mutability, but that doesn't matter at
@@ -59,7 +42,7 @@ pub fn expand_boxed_vec_ty(tcx: ty::ctxt, t: ty::t) -> ty::t {
     let unboxed_vec_ty = ty::mk_mut_unboxed_vec(tcx, unit_ty);
     match ty::get(t).sty {
         ty::ty_estr(ty::vstore_uniq) | ty::ty_evec(_, ty::vstore_uniq) => {
-            fail!("cannot treat vectors/strings as exchange allocations yet");
+            ty::mk_imm_uniq(tcx, unboxed_vec_ty)
         }
         ty::ty_estr(ty::vstore_box) | ty::ty_evec(_, ty::vstore_box) => {
             ty::mk_imm_box(tcx, unboxed_vec_ty)
@@ -80,8 +63,12 @@ pub fn get_alloc(bcx: block, vptr: ValueRef) -> ValueRef {
     Load(bcx, GEPi(bcx, vptr, [0u, abi::vec_elt_alloc]))
 }

-pub fn get_bodyptr(bcx: block, vptr: ValueRef) -> ValueRef {
-    GEPi(bcx, vptr, [0u, abi::box_field_body])
+pub fn get_bodyptr(bcx: block, vptr: ValueRef, t: ty::t) -> ValueRef {
+    if ty::type_contents(bcx.tcx(), t).contains_managed() {
+        GEPi(bcx, vptr, [0u, abi::box_field_body])
+    } else {
+        vptr
+    }
 }

 pub fn get_dataptr(bcx: block, vptr: ValueRef) -> ValueRef {
@@ -104,25 +91,24 @@ pub fn alloc_raw(bcx: block, unit_ty: ty::t,
     let vecbodyty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty);
     let vecsize = Add(bcx, alloc, llsize_of(ccx, ccx.opaque_vec_type));

-    let base::MallocResult {bcx, box: bx, body} =
-        base::malloc_general_dyn(bcx, vecbodyty, heap, vecsize);
-    Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill]));
-    Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc]));
-    base::maybe_set_managed_unique_rc(bcx, bx, heap);
-    return rslt(bcx, bx);
-}
-
-pub fn heap_for_unique_vector(bcx: block, t: ty::t) -> heap {
-    if ty::type_contents(bcx.tcx(), t).contains_managed() {
-        heap_managed_unique
+    if heap == heap_exchange {
+        let Result { bcx: bcx, val: val } = malloc_raw_dyn(bcx, vecbodyty, heap_exchange, vecsize);
+        Store(bcx, fill, GEPi(bcx, val, [0u, abi::vec_elt_fill]));
+        Store(bcx, alloc, GEPi(bcx, val, [0u, abi::vec_elt_alloc]));
+        return rslt(bcx, val);
     } else {
-        heap_exchange_vector
+        let base::MallocResult {bcx, box: bx, body} =
+            base::malloc_general_dyn(bcx, vecbodyty, heap, vecsize);
+        Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill]));
+        Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc]));
+        base::maybe_set_managed_unique_rc(bcx, bx, heap);
+        return rslt(bcx, bx);
     }
 }

 pub fn alloc_uniq_raw(bcx: block, unit_ty: ty::t,
                       fill: ValueRef, alloc: ValueRef) -> Result {
-    alloc_raw(bcx, unit_ty, fill, alloc, heap_for_unique_vector(bcx, unit_ty))
+    alloc_raw(bcx, unit_ty, fill, alloc, base::heap_for_unique(bcx, unit_ty))
 }

 pub fn alloc_vec(bcx: block,
@@ -146,12 +132,12 @@ pub fn alloc_vec(bcx: block,
 pub fn duplicate_uniq(bcx: block, vptr: ValueRef, vec_ty: ty::t) -> Result {
     let _icx = push_ctxt("tvec::duplicate_uniq");

-    let fill = get_fill(bcx, get_bodyptr(bcx, vptr));
+    let fill = get_fill(bcx, get_bodyptr(bcx, vptr, vec_ty));
     let unit_ty = ty::sequence_element_type(bcx.tcx(), vec_ty);
     let Result {bcx, val: newptr} = alloc_uniq_raw(bcx, unit_ty, fill, fill);

-    let data_ptr = get_dataptr(bcx, get_bodyptr(bcx, vptr));
-    let new_data_ptr = get_dataptr(bcx, get_bodyptr(bcx, newptr));
+    let data_ptr = get_dataptr(bcx, get_bodyptr(bcx, vptr, vec_ty));
+    let new_data_ptr = get_dataptr(bcx, get_bodyptr(bcx, newptr, vec_ty));
     base::call_memcpy(bcx, new_data_ptr, data_ptr, fill, 1);

     let bcx = if ty::type_needs_drop(bcx.tcx(), unit_ty) {
@@ -323,7 +309,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: block, heap: heap, vstore_expr: @ast::e

     // Handle ~"".
     match heap {
-        heap_exchange_vector => {
+        heap_exchange => {
             match content_expr.node {
                 ast::expr_lit(@codemap::spanned {
                     node: ast::lit_str(s), _
@@ -346,7 +332,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: block, heap: heap, vstore_expr: @ast::e
                 _ => {}
             }
         }
-        heap_exchange | heap_exchange_closure => fail!("vectors use vector_exchange_alloc"),
+        heap_exchange_closure => fail!("vectors use exchange_alloc"),
         heap_managed | heap_managed_unique => {}
     }

||||
@ -356,7 +342,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: block, heap: heap, vstore_expr: @ast::e
|
||||
let Result {bcx, val} = alloc_vec(bcx, vt.unit_ty, count, heap);
|
||||
|
||||
add_clean_free(bcx, val, heap);
|
||||
let dataptr = get_dataptr(bcx, get_bodyptr(bcx, val));
|
||||
let dataptr = get_dataptr(bcx, get_bodyptr(bcx, val, vt.vec_ty));
|
||||
|
||||
debug!("alloc_vec() returned val=%s, dataptr=%s",
|
||||
bcx.val_to_str(val), bcx.val_to_str(dataptr));
|
||||
@@ -562,7 +548,7 @@ pub fn get_base_and_len(bcx: block,
             (base, len)
         }
         ty::vstore_uniq | ty::vstore_box => {
-            let body = get_bodyptr(bcx, llval);
+            let body = get_bodyptr(bcx, llval, vec_ty);
             (get_dataptr(bcx, body), get_fill(bcx, body))
         }
     }
@@ -604,7 +590,7 @@ pub fn iter_vec_raw(bcx: block, data_ptr: ValueRef, vec_ty: ty::t,
 pub fn iter_vec_uniq(bcx: block, vptr: ValueRef, vec_ty: ty::t,
                      fill: ValueRef, f: iter_vec_block) -> block {
     let _icx = push_ctxt("tvec::iter_vec_uniq");
-    let data_ptr = get_dataptr(bcx, get_bodyptr(bcx, vptr));
+    let data_ptr = get_dataptr(bcx, get_bodyptr(bcx, vptr, vec_ty));
     iter_vec_raw(bcx, data_ptr, vec_ty, fill, f)
 }

@@ -183,7 +183,7 @@ pub fn type_of(cx: &mut CrateContext, t: ty::t) -> Type {
       ty::ty_uint(t) => Type::uint_from_ty(cx, t),
       ty::ty_float(t) => Type::float_from_ty(cx, t),
       ty::ty_estr(ty::vstore_uniq) => {
-        Type::unique(cx, &Type::vec(cx.sess.targ_cfg.arch, &Type::i8())).ptr_to()
+        Type::vec(cx.sess.targ_cfg.arch, &Type::i8()).ptr_to()
       }
       ty::ty_enum(did, ref substs) => {
         // Only create the named struct, but don't fill it in. We
@@ -217,7 +217,11 @@ pub fn type_of(cx: &mut CrateContext, t: ty::t) -> Type {
       ty::ty_evec(ref mt, ty::vstore_uniq) => {
         let ty = type_of(cx, mt.ty);
         let ty = Type::vec(cx.sess.targ_cfg.arch, &ty);
-        Type::unique(cx, &ty).ptr_to()
+        if ty::type_contents(cx.tcx, mt.ty).contains_managed() {
+            Type::unique(cx, &ty).ptr_to()
+        } else {
+            ty.ptr_to()
+        }
       }
       ty::ty_unboxed_vec(ref mt) => {
         let ty = type_of(cx, mt.ty);
@@ -297,6 +297,14 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }

+    #[cfg(not(stage0))]
+    fn visit_evec_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool {
+        self.align_to::<~[@u8]>();
+        if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
+        self.bump_past::<~[@u8]>();
+        true
+    }
+
     fn visit_evec_slice(&self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<&'static [u8]>();
         if ! self.inner.visit_evec_slice(mtbl, inner) { return false; }
@@ -353,6 +353,14 @@ impl TyVisitor for ReprVisitor {
     fn visit_evec_uniq(&self, mtbl: uint, inner: *TyDesc) -> bool {
         do self.get::<&UnboxedVecRepr> |b| {
             self.writer.write_char('~');
             self.write_unboxed_vec_repr(mtbl, *b, inner);
         }
     }

+    #[cfg(not(stage0))]
+    fn visit_evec_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool {
+        do self.get::<&VecRepr> |b| {
+            self.writer.write_char('~');
+            self.write_unboxed_vec_repr(mtbl, &b.unboxed, inner);
+        }
+    }
+
@@ -84,14 +84,6 @@ pub unsafe fn exchange_malloc(size: uintptr_t) -> *c_char {
     malloc_raw(size as uint) as *c_char
 }

-#[cfg(not(test))]
-#[lang="vector_exchange_malloc"]
-#[inline]
-pub unsafe fn vector_exchange_malloc(align: u32, size: uintptr_t) -> *c_char {
-    let total_size = get_box_size(size as uint, align as uint);
-    malloc_raw(total_size as uint) as *c_char
-}
-
 // FIXME: #7496
 #[cfg(not(test))]
 #[lang="closure_exchange_malloc"]
@@ -1003,6 +1003,7 @@ pub mod raw {

     /// Sets the length of the string and adds the null terminator
     #[inline]
+    #[cfg(stage0)]
     pub unsafe fn set_len(v: &mut ~str, new_len: uint) {
         let v: **mut vec::raw::VecRepr = cast::transmute(v);
         let repr: *mut vec::raw::VecRepr = *v;
@@ -1012,6 +1013,18 @@ pub mod raw {
         *null = 0u8;
     }

+    /// Sets the length of the string and adds the null terminator
+    #[inline]
+    #[cfg(not(stage0))]
+    pub unsafe fn set_len(v: &mut ~str, new_len: uint) {
+        let v: **mut vec::UnboxedVecRepr = cast::transmute(v);
+        let repr: *mut vec::UnboxedVecRepr = *v;
+        (*repr).fill = new_len + 1u;
+        let null = ptr::mut_offset(cast::transmute(&((*repr).data)),
+                                   new_len);
+        *null = 0u8;
+    }
+
     #[test]
     fn test_from_buf_len() {
         unsafe {
@@ -2027,7 +2040,7 @@ impl NullTerminatedStr for @str {
      */
     #[inline]
     fn as_bytes_with_null<'a>(&'a self) -> &'a [u8] {
-        let ptr: &'a ~[u8] = unsafe { ::cast::transmute(self) };
+        let ptr: &'a @[u8] = unsafe { ::cast::transmute(self) };
         let slice: &'a [u8] = *ptr;
         slice
     }
@@ -99,6 +99,7 @@ pub trait TyVisitor {
     fn visit_unboxed_vec(&self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_box(&self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_uniq(&self, mtbl: uint, inner: *TyDesc) -> bool;
+    fn visit_evec_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_slice(&self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_evec_fixed(&self, n: uint, sz: uint, align: uint,
                         mtbl: uint, inner: *TyDesc) -> bool;
@@ -30,6 +30,7 @@ use ptr::RawPtr;
 use rt::global_heap::malloc_raw;
 use rt::global_heap::realloc_raw;
 use sys;
+use sys::size_of;
 use uint;
 use unstable::intrinsics;
 #[cfg(stage0)]
@@ -108,9 +109,9 @@ pub fn with_capacity<T>(capacity: uint) -> ~[T] {
             vec
         } else {
             let alloc = capacity * sys::nonzero_size_of::<T>();
-            let ptr = malloc_raw(alloc + sys::size_of::<raw::VecRepr>()) as *mut raw::VecRepr;
-            (*ptr).unboxed.alloc = alloc;
-            (*ptr).unboxed.fill = 0;
+            let ptr = malloc_raw(alloc + sys::size_of::<UnboxedVecRepr>()) as *mut UnboxedVecRepr;
+            (*ptr).alloc = alloc;
+            (*ptr).fill = 0;
             cast::transmute(ptr)
         }
     }
@@ -1150,7 +1151,7 @@ impl<T> OwnedVector<T> for ~[T] {
                 ::at_vec::raw::reserve_raw(td, ptr, n);
             } else {
                 let alloc = n * sys::nonzero_size_of::<T>();
-                *ptr = realloc_raw(*ptr as *mut c_void, alloc + sys::size_of::<raw::VecRepr>())
+                *ptr = realloc_raw(*ptr as *mut c_void, alloc + size_of::<raw::VecRepr>())
                     as *mut raw::VecRepr;
                 (**ptr).unboxed.alloc = alloc;
             }
@@ -1173,19 +1174,20 @@ impl<T> OwnedVector<T> for ~[T] {
         // Only make the (slow) call into the runtime if we have to
         if self.capacity() < n {
             unsafe {
-                let ptr: *mut *mut raw::VecRepr = cast::transmute(self);
                 let td = get_tydesc::<T>();
                 if contains_managed::<T>() {
+                    let ptr: *mut *mut raw::VecRepr = cast::transmute(self);
                     ::at_vec::raw::reserve_raw(td, ptr, n);
                 } else {
+                    let ptr: *mut *mut UnboxedVecRepr = cast::transmute(self);
                     let alloc = n * sys::nonzero_size_of::<T>();
-                    let size = alloc + sys::size_of::<raw::VecRepr>();
+                    let size = alloc + sys::size_of::<UnboxedVecRepr>();
                     if alloc / sys::nonzero_size_of::<T>() != n || size < alloc {
                         fail!("vector size is too large: %u", n);
                     }
                     *ptr = realloc_raw(*ptr as *mut c_void, size)
-                        as *mut raw::VecRepr;
-                    (**ptr).unboxed.alloc = alloc;
+                        as *mut UnboxedVecRepr;
+                    (**ptr).alloc = alloc;
                 }
             }
         }
@@ -1211,6 +1213,7 @@ impl<T> OwnedVector<T> for ~[T] {

     /// Returns the number of elements the vector can hold without reallocating.
     #[inline]
+    #[cfg(stage0)]
     fn capacity(&self) -> uint {
         unsafe {
             let repr: **raw::VecRepr = transmute(self);
@@ -1218,8 +1221,24 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }

+    /// Returns the number of elements the vector can hold without reallocating.
+    #[inline]
+    #[cfg(not(stage0))]
+    fn capacity(&self) -> uint {
+        unsafe {
+            if contains_managed::<T>() {
+                let repr: **raw::VecRepr = transmute(self);
+                (**repr).unboxed.alloc / sys::nonzero_size_of::<T>()
+            } else {
+                let repr: **UnboxedVecRepr = transmute(self);
+                (**repr).alloc / sys::nonzero_size_of::<T>()
+            }
+        }
+    }
+
     /// Append an element to a vector
     #[inline]
+    #[cfg(stage0)]
     fn push(&mut self, t: T) {
         unsafe {
             let repr: **raw::VecRepr = transmute(&mut *self);
@@ -1233,8 +1252,36 @@ impl<T> OwnedVector<T> for ~[T] {
         }
     }

+    /// Append an element to a vector
+    #[inline]
+    #[cfg(not(stage0))]
+    fn push(&mut self, t: T) {
+        unsafe {
+            if contains_managed::<T>() {
+                let repr: **raw::VecRepr = transmute(&mut *self);
+                let fill = (**repr).unboxed.fill;
+                if (**repr).unboxed.alloc <= fill {
+                    let new_len = self.len() + 1;
+                    self.reserve_at_least(new_len);
+                }
+
+                self.push_fast(t);
+            } else {
+                let repr: **UnboxedVecRepr = transmute(&mut *self);
+                let fill = (**repr).fill;
+                if (**repr).alloc <= fill {
+                    let new_len = self.len() + 1;
+                    self.reserve_at_least(new_len);
+                }
+
+                self.push_fast(t);
+            }
+        }
+    }
+
     // This doesn't bother to make sure we have space.
     #[inline] // really pretty please
+    #[cfg(stage0)]
     unsafe fn push_fast(&mut self, t: T) {
         let repr: **mut raw::VecRepr = transmute(self);
         let fill = (**repr).unboxed.fill;
@@ -1244,6 +1291,27 @@ impl<T> OwnedVector<T> for ~[T] {
         intrinsics::move_val_init(&mut(*p), t);
     }

+    // This doesn't bother to make sure we have space.
+    #[inline] // really pretty please
+    #[cfg(not(stage0))]
+    unsafe fn push_fast(&mut self, t: T) {
+        if contains_managed::<T>() {
+            let repr: **mut raw::VecRepr = transmute(self);
+            let fill = (**repr).unboxed.fill;
+            (**repr).unboxed.fill += sys::nonzero_size_of::<T>();
+            let p = to_unsafe_ptr(&((**repr).unboxed.data));
+            let p = ptr::offset(p, fill) as *mut T;
+            intrinsics::move_val_init(&mut(*p), t);
+        } else {
+            let repr: **mut UnboxedVecRepr = transmute(self);
+            let fill = (**repr).fill;
+            (**repr).fill += sys::nonzero_size_of::<T>();
+            let p = to_unsafe_ptr(&((**repr).data));
+            let p = ptr::offset(p, fill) as *mut T;
+            intrinsics::move_val_init(&mut(*p), t);
+        }
+    }
+
     /// Takes ownership of the vector `rhs`, moving all elements into
     /// the current vector. This does not copy any elements, and it is
     /// illegal to use the `rhs` vector after calling this method
@@ -1834,6 +1902,8 @@ pub mod raw {
     use unstable::intrinsics;
     use vec::{UnboxedVecRepr, with_capacity, ImmutableVector, MutableVector};
     use util;
+    #[cfg(not(stage0))]
+    use unstable::intrinsics::contains_managed;

     /// The internal representation of a (boxed) vector
     #[allow(missing_doc)]
@@ -1858,11 +1928,31 @@ pub mod raw {
      * the vector is actually the specified size.
      */
     #[inline]
+    #[cfg(stage0)]
     pub unsafe fn set_len<T>(v: &mut ~[T], new_len: uint) {
         let repr: **mut VecRepr = transmute(v);
         (**repr).unboxed.fill = new_len * sys::nonzero_size_of::<T>();
     }

+    /**
+     * Sets the length of a vector
+     *
+     * This will explicitly set the size of the vector, without actually
+     * modifing its buffers, so it is up to the caller to ensure that
+     * the vector is actually the specified size.
+     */
+    #[inline]
+    #[cfg(not(stage0))]
+    pub unsafe fn set_len<T>(v: &mut ~[T], new_len: uint) {
+        if contains_managed::<T>() {
+            let repr: **mut VecRepr = transmute(v);
+            (**repr).unboxed.fill = new_len * sys::nonzero_size_of::<T>();
+        } else {
+            let repr: **mut UnboxedVecRepr = transmute(v);
+            (**repr).fill = new_len * sys::nonzero_size_of::<T>();
+        }
+    }
+
     /**
      * Returns an unsafe pointer to the vector's buffer
      *
@@ -390,9 +390,9 @@ void tm_to_rust_tm(tm* in_tm, rust_tm* out_tm, int32_t gmtoff,
     if (zone != NULL) {
         size_t size = strlen(zone);
         reserve_vec_exact(&out_tm->tm_zone, size + 1);
-        memcpy(out_tm->tm_zone->body.data, zone, size);
-        out_tm->tm_zone->body.fill = size + 1;
-        out_tm->tm_zone->body.data[size] = '\0';
+        memcpy(out_tm->tm_zone->data, zone, size);
+        out_tm->tm_zone->fill = size + 1;
+        out_tm->tm_zone->data[size] = '\0';
     }
 }

@@ -57,17 +57,17 @@ vec_data(rust_vec *v) {
     return reinterpret_cast<T*>(v->data);
 }

-inline void reserve_vec_exact(rust_vec_box** vpp,
+inline void reserve_vec_exact(rust_vec** vpp,
                               size_t size) {
-    if (size > (*vpp)->body.alloc) {
+    if (size > (*vpp)->alloc) {
         rust_exchange_alloc exchange_alloc;
-        *vpp = (rust_vec_box*)exchange_alloc
-            .realloc(*vpp, size + sizeof(rust_vec_box));
-        (*vpp)->body.alloc = size;
+        *vpp = (rust_vec*)exchange_alloc
+            .realloc(*vpp, size + sizeof(rust_vec));
+        (*vpp)->alloc = size;
     }
 }

-typedef rust_vec_box rust_str;
+typedef rust_vec rust_str;

 inline size_t get_box_size(size_t body_size, size_t body_align) {
     size_t header_size = sizeof(rust_opaque_box);
@@ -284,6 +284,13 @@ impl<V:TyVisitor + movable_ptr> TyVisitor for ptr_visit_adaptor<V> {
         true
     }

+    fn visit_evec_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool {
+        self.align_to::<~[@u8]>();
+        if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }
+        self.bump_past::<~[@u8]>();
+        true
+    }
+
     fn visit_evec_slice(&self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<&'static [u8]>();
         if ! self.inner.visit_evec_slice(mtbl, inner) { return false; }
@@ -567,6 +574,7 @@ impl TyVisitor for my_visitor {
     fn visit_unboxed_vec(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_box(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_uniq(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
+    fn visit_evec_uniq_managed(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_slice(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_fixed(&self, _n: uint, _sz: uint, _align: uint,
                         _mtbl: uint, _inner: *TyDesc) -> bool { true }
@@ -85,6 +85,14 @@ impl TyVisitor for MyVisitor {
         self.types.push(~"]");
         true
     }
+    fn visit_evec_uniq_managed(&self, _mtbl: uint, inner: *TyDesc) -> bool {
+        self.types.push(~"[");
+        unsafe {
+            visit_tydesc(inner, (@*self) as @TyVisitor);
+        }
+        self.types.push(~"]");
+        true
+    }
     fn visit_evec_slice(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_evec_fixed(&self, _n: uint, _sz: uint, _align: uint,
                         _mtbl: uint, _inner: *TyDesc) -> bool { true }