rollup merge of #22109: petrochenkov/intuint1

Alex Crichton 2015-02-10 08:42:53 -08:00
commit 6ecb011a58
7 changed files with 101 additions and 103 deletions
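
The rename pattern repeated in every hunk below swaps the old pointer-sized `int`/`uint` types for explicit `isize`/`usize` where pointer size is actually meant (lengths, indices, allocation sizes), and for fixed-width types such as `i32` where it is not. A minimal illustrative sketch (the `first_index` helper is hypothetical, not from the diff):

    // Before the rename (pre-1.0 Rust): `int`/`uint` were pointer-sized.
    //   fn first_index(v: &Vec<int>) -> uint { 0 }

    // After: `usize` names the pointer-sized unsigned integer explicitly,
    // and element types move to fixed-width integers such as `i32`.
    fn first_index(_v: &[i32]) -> usize {
        0
    }

    fn main() {
        assert_eq!(first_index(&[1, 2, 3]), 0);
    }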

(changed file 1 of 7)

@@ -206,12 +206,12 @@ impl<T> Arc<T> {
/// Get the number of weak references to this value.
#[inline]
#[unstable(feature = "alloc")]
-pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
+pub fn weak_count<T>(this: &Arc<T>) -> usize { this.inner().weak.load(SeqCst) - 1 }
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "alloc")]
-pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
+pub fn strong_count<T>(this: &Arc<T>) -> usize { this.inner().strong.load(SeqCst) }
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Arc<T> {
@@ -649,7 +649,7 @@ mod tests {
let (tx, rx) = channel();
let _t = Thread::spawn(move || {
-let arc_v: Arc<Vec<int>> = rx.recv().unwrap();
+let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
assert_eq!((*arc_v)[3], 4);
});
@@ -818,5 +818,5 @@ mod tests {
// Make sure deriving works with Arc<T>
#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
-struct Foo { inner: Arc<int> }
+struct Foo { inner: Arc<i32> }
}
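
For context, a minimal usage sketch of the counters these hunks touch, written against today's stable `std::sync::Arc` (the descendant of `alloc::arc`); note that `weak_count` excludes the implicit weak reference held by the strong pointers, matching the `- 1` above:

    use std::sync::Arc;

    fn main() {
        let a = Arc::new(vec![1i32, 2, 3, 4]);
        let b = a.clone();                    // bumps the strong count
        let w = Arc::downgrade(&a);           // creates one weak reference

        assert_eq!(Arc::strong_count(&a), 2); // `a` and `b`
        assert_eq!(Arc::weak_count(&a), 1);   // just `w`
        assert_eq!(a[3], 4);
        drop((b, w));
    }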

(changed file 2 of 7)

@@ -22,7 +22,7 @@ use std::boxed::BoxAny;
#[test]
fn test_owned_clone() {
let a = Box::new(5);
-let b: Box<int> = a.clone();
+let b: Box<i32> = a.clone();
assert!(a == b);
}
@@ -31,11 +31,11 @@ struct Test;
#[test]
fn any_move() {
-let a = Box::new(8us) as Box<Any>;
+let a = Box::new(8) as Box<Any>;
let b = Box::new(Test) as Box<Any>;
-match a.downcast::<uint>() {
-Ok(a) => { assert!(a == Box::new(8us)); }
+match a.downcast::<i32>() {
+Ok(a) => { assert!(a == Box::new(8)); }
Err(..) => panic!()
}
match b.downcast::<Test>() {
@@ -47,7 +47,7 @@ fn any_move() {
let b = Box::new(Test) as Box<Any>;
assert!(a.downcast::<Box<Test>>().is_err());
-assert!(b.downcast::<Box<uint>>().is_err());
+assert!(b.downcast::<Box<i32>>().is_err());
}
#[test]
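
A self-contained sketch of the `downcast` behavior this updated test exercises, in modern syntax (`Box<dyn Any>` rather than 2015's `Box<Any>`):

    use std::any::Any;

    fn main() {
        let a = Box::new(8i32) as Box<dyn Any>;
        let b = Box::new("not an i32") as Box<dyn Any>;

        // Succeeds: the erased value really is an i32.
        assert_eq!(*a.downcast::<i32>().unwrap(), 8);

        // Fails: a wrong-type downcast returns Err with the box intact.
        assert!(b.downcast::<i32>().is_err());
    }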

(changed file 3 of 7)

@@ -21,7 +21,7 @@ use core::ptr::PtrExt;
/// power of 2. The alignment must be no larger than the largest supported page
/// size on the platform.
#[inline]
-pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
imp::allocate(size, align)
}
@@ -37,7 +37,7 @@ pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
/// create the allocation referenced by `ptr`. The `old_size` parameter may be
/// any value in range_inclusive(requested_size, usable_size).
#[inline]
-pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
+pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
imp::reallocate(ptr, old_size, size, align)
}
@@ -54,7 +54,8 @@ pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint)
/// create the allocation referenced by `ptr`. The `old_size` parameter may be
/// any value in range_inclusive(requested_size, usable_size).
#[inline]
-pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> uint {
+pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
+align: usize) -> usize {
imp::reallocate_inplace(ptr, old_size, size, align)
}
@@ -66,14 +67,14 @@ pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint, align
/// create the allocation referenced by `ptr`. The `old_size` parameter may be
/// any value in range_inclusive(requested_size, usable_size).
#[inline]
-pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
+pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) {
imp::deallocate(ptr, old_size, align)
}
/// Returns the usable size of an allocation created with the specified
/// `size` and `align`.
#[inline]
-pub fn usable_size(size: uint, align: uint) -> uint {
+pub fn usable_size(size: usize, align: usize) -> usize {
imp::usable_size(size, align)
}
@@ -96,7 +97,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
#[cfg(not(test))]
#[lang="exchange_malloc"]
#[inline]
-unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
+unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
if size == 0 {
EMPTY as *mut u8
} else {
@@ -109,7 +110,7 @@ unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
#[cfg(not(test))]
#[lang="exchange_free"]
#[inline]
-unsafe fn exchange_free(ptr: *mut u8, old_size: uint, align: uint) {
+unsafe fn exchange_free(ptr: *mut u8, old_size: usize, align: usize) {
deallocate(ptr, old_size, align);
}
@@ -122,49 +123,49 @@ unsafe fn exchange_free(ptr: *mut u8, old_size: uint, align: uint) {
target_arch = "mips",
target_arch = "mipsel",
target_arch = "powerpc")))]
-const MIN_ALIGN: uint = 8;
+const MIN_ALIGN: usize = 8;
#[cfg(all(not(feature = "external_funcs"),
not(feature = "external_crate"),
any(target_arch = "x86",
target_arch = "x86_64",
target_arch = "aarch64")))]
-const MIN_ALIGN: uint = 16;
+const MIN_ALIGN: usize = 16;
#[cfg(feature = "external_funcs")]
mod imp {
extern {
-fn rust_allocate(size: uint, align: uint) -> *mut u8;
-fn rust_deallocate(ptr: *mut u8, old_size: uint, align: uint);
-fn rust_reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8;
-fn rust_reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint,
-align: uint) -> uint;
-fn rust_usable_size(size: uint, align: uint) -> uint;
+fn rust_allocate(size: usize, align: usize) -> *mut u8;
+fn rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
+fn rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
+fn rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
+align: usize) -> usize;
+fn rust_usable_size(size: usize, align: usize) -> usize;
fn rust_stats_print();
}
#[inline]
-pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
rust_allocate(size, align)
}
#[inline]
-pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
+pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) {
rust_deallocate(ptr, old_size, align)
}
#[inline]
-pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
+pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
rust_reallocate(ptr, old_size, size, align)
}
#[inline]
-pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint,
-align: uint) -> uint {
+pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
+align: usize) -> usize {
rust_reallocate_inplace(ptr, old_size, size, align)
}
#[inline]
-pub fn usable_size(size: uint, align: uint) -> uint {
+pub fn usable_size(size: usize, align: usize) -> usize {
unsafe { rust_usable_size(size, align) }
}
@@ -215,42 +216,42 @@ mod imp {
// MALLOCX_ALIGN(a) macro
#[inline(always)]
-fn mallocx_align(a: uint) -> c_int { a.trailing_zeros() as c_int }
+fn mallocx_align(a: usize) -> c_int { a.trailing_zeros() as c_int }
#[inline(always)]
-fn align_to_flags(align: uint) -> c_int {
+fn align_to_flags(align: usize) -> c_int {
if align <= MIN_ALIGN { 0 } else { mallocx_align(align) }
}
#[inline]
-pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
let flags = align_to_flags(align);
je_mallocx(size as size_t, flags) as *mut u8
}
#[inline]
-pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
+pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
let flags = align_to_flags(align);
je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8
}
#[inline]
-pub unsafe fn reallocate_inplace(ptr: *mut u8, _old_size: uint, size: uint,
-align: uint) -> uint {
+pub unsafe fn reallocate_inplace(ptr: *mut u8, _old_size: usize, size: usize,
+align: usize) -> usize {
let flags = align_to_flags(align);
-je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as uint
+je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize
}
#[inline]
-pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
+pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) {
let flags = align_to_flags(align);
je_sdallocx(ptr as *mut c_void, old_size as size_t, flags)
}
#[inline]
-pub fn usable_size(size: uint, align: uint) -> uint {
+pub fn usable_size(size: usize, align: usize) -> usize {
let flags = align_to_flags(align);
-unsafe { je_nallocx(size as size_t, flags) as uint }
+unsafe { je_nallocx(size as size_t, flags) as usize }
}
pub fn stats_print() {
@@ -277,7 +278,7 @@ mod imp {
}
#[inline]
-pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
libc::malloc(size as libc::size_t) as *mut u8
} else {
@@ -294,7 +295,7 @@ mod imp {
}
#[inline]
-pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
+pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
} else {
@@ -306,18 +307,18 @@ mod imp {
}
#[inline]
-pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: uint, _size: uint,
-_align: uint) -> uint {
+pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: usize, _size: usize,
+_align: usize) -> usize {
old_size
}
#[inline]
-pub unsafe fn deallocate(ptr: *mut u8, _old_size: uint, _align: uint) {
+pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, _align: usize) {
libc::free(ptr as *mut libc::c_void)
}
#[inline]
-pub fn usable_size(size: uint, _align: uint) -> uint {
+pub fn usable_size(size: usize, _align: usize) -> usize {
size
}
@@ -341,7 +342,7 @@ mod imp {
}
#[inline]
-pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
libc::malloc(size as size_t) as *mut u8
} else {
@@ -350,7 +351,7 @@ mod imp {
}
#[inline]
-pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
+pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
if align <= MIN_ALIGN {
libc::realloc(ptr as *mut c_void, size as size_t) as *mut u8
} else {
@@ -359,13 +360,13 @@ mod imp {
}
#[inline]
-pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: uint, _size: uint,
-_align: uint) -> uint {
+pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: usize, _size: usize,
+_align: usize) -> usize {
old_size
}
#[inline]
-pub unsafe fn deallocate(ptr: *mut u8, _old_size: uint, align: uint) {
+pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, align: usize) {
if align <= MIN_ALIGN {
libc::free(ptr as *mut libc::c_void)
} else {
@@ -374,7 +375,7 @@ mod imp {
}
#[inline]
-pub fn usable_size(size: uint, _align: uint) -> uint {
+pub fn usable_size(size: usize, _align: usize) -> usize {
size
}
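
The `size`/`align` contract documented above survives in today's stable allocator API. A minimal sketch using `std::alloc` (the modern counterpart of these 2015-era functions, not the interface in this diff):

    use std::alloc::{alloc, dealloc, realloc, Layout};

    fn main() {
        unsafe {
            // Alignment must be a power of two, as the docs above require.
            let layout = Layout::from_size_align(64, 16).unwrap();
            let p = alloc(layout);
            assert!(!p.is_null());

            // Grow the block; the old layout describes the existing allocation.
            let p = realloc(p, layout, 128);
            assert!(!p.is_null());

            // Deallocation must be told the size/align the block now has.
            dealloc(p, Layout::from_size_align(128, 16).unwrap());
        }
    }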

(changed file 4 of 7)

@@ -70,7 +70,6 @@
#![feature(lang_items, unsafe_destructor)]
#![feature(box_syntax)]
#![feature(optin_builtin_traits)]
-#![feature(int_uint)]
#![feature(unboxed_closures)]
#![feature(core)]
#![feature(hash)]

(changed file 5 of 7)

@@ -40,7 +40,7 @@
//! }
//!
//! struct Gadget {
-//! id: int,
+//! id: i32,
//! owner: Rc<Owner>
//! // ...other fields
//! }
@@ -99,7 +99,7 @@
//! }
//!
//! struct Gadget {
-//! id: int,
+//! id: i32,
//! owner: Rc<Owner>
//! // ...other fields
//! }
@@ -166,8 +166,8 @@ use heap::deallocate;
struct RcBox<T> {
value: T,
-strong: Cell<uint>,
-weak: Cell<uint>
+strong: Cell<usize>,
+weak: Cell<usize>
}
/// An immutable reference-counted pointer type.
@@ -233,12 +233,12 @@ impl<T> Rc<T> {
/// Get the number of weak references to this value.
#[inline]
#[unstable(feature = "alloc")]
-pub fn weak_count<T>(this: &Rc<T>) -> uint { this.weak() - 1 }
+pub fn weak_count<T>(this: &Rc<T>) -> usize { this.weak() - 1 }
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "alloc")]
-pub fn strong_count<T>(this: &Rc<T>) -> uint { this.strong() }
+pub fn strong_count<T>(this: &Rc<T>) -> usize { this.strong() }
/// Returns true if there are no other `Rc` or `Weak<T>` values that share the same inner value.
///
@@ -447,7 +447,7 @@ impl<T: Default> Default for Rc<T> {
/// use std::rc::Rc;
/// use std::default::Default;
///
-/// let x: Rc<int> = Default::default();
+/// let x: Rc<i32> = Default::default();
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
@@ -750,7 +750,7 @@ trait RcBoxPtr<T> {
fn inner(&self) -> &RcBox<T>;
#[inline]
-fn strong(&self) -> uint { self.inner().strong.get() }
+fn strong(&self) -> usize { self.inner().strong.get() }
#[inline]
fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
@@ -759,7 +759,7 @@ trait RcBoxPtr<T> {
fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
#[inline]
-fn weak(&self) -> uint { self.inner().weak.get() }
+fn weak(&self) -> usize { self.inner().weak.get() }
#[inline]
fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
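
A runnable version of the `Owner`/`Gadget` pattern the module docs above sketch (the struct names come from the doc comment; the remaining details are filled in as a minimal assumption):

    use std::rc::Rc;

    struct Owner {
        name: String,
    }

    struct Gadget {
        id: i32,
        owner: Rc<Owner>, // shared, immutable ownership
    }

    fn main() {
        let owner = Rc::new(Owner { name: "Gadget Man".to_string() });

        // Both gadgets share one Owner; cloning the Rc bumps a count
        // rather than copying the Owner itself.
        let g1 = Gadget { id: 1, owner: owner.clone() };
        let g2 = Gadget { id: 2, owner: owner.clone() };
        drop(owner); // the gadgets keep the Owner alive

        println!("gadget {} owned by {}", g1.id, g1.owner.name);
        println!("gadget {} owned by {}", g2.id, g2.owner.name);
    }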

(changed file 6 of 7)

@@ -31,7 +31,6 @@
#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core)]
-#![feature(int_uint)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![feature(unsafe_destructor)]
@@ -55,12 +54,12 @@ use std::rt::heap::{allocate, deallocate};
#[derive(Clone, PartialEq)]
struct Chunk {
data: Rc<RefCell<Vec<u8>>>,
-fill: Cell<uint>,
+fill: Cell<usize>,
is_copy: Cell<bool>,
}
impl Chunk {
-fn capacity(&self) -> uint {
+fn capacity(&self) -> usize {
self.data.borrow().capacity()
}
@@ -105,7 +104,7 @@ impl Arena {
}
/// Allocates a new Arena with `initial_size` bytes preallocated.
-pub fn new_with_size(initial_size: uint) -> Arena {
+pub fn new_with_size(initial_size: usize) -> Arena {
Arena {
head: RefCell::new(chunk(initial_size, false)),
copy_head: RefCell::new(chunk(initial_size, true)),
@@ -114,7 +113,7 @@ impl Arena {
}
}
-fn chunk(size: uint, is_copy: bool) -> Chunk {
+fn chunk(size: usize, is_copy: bool) -> Chunk {
Chunk {
data: Rc::new(RefCell::new(Vec::with_capacity(size))),
fill: Cell::new(0),
@@ -137,7 +136,7 @@ impl Drop for Arena {
}
#[inline]
-fn round_up(base: uint, align: uint) -> uint {
+fn round_up(base: usize, align: usize) -> usize {
(base.checked_add(align - 1)).unwrap() & !(align - 1)
}
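
The bit trick in `round_up` assumes a power-of-two `align`: adding `align - 1` overshoots into the next aligned block, and masking with `!(align - 1)` clears the low bits back down to an aligned value. A standalone check of the function as written above:

    fn round_up(base: usize, align: usize) -> usize {
        (base.checked_add(align - 1)).unwrap() & !(align - 1)
    }

    fn main() {
        assert_eq!(round_up(13, 8), 16); // 13 + 7 = 20; 20 & !7 = 16
        assert_eq!(round_up(16, 8), 16); // already aligned: unchanged
        assert_eq!(round_up(0, 8), 0);
    }
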
@@ -149,7 +148,7 @@ unsafe fn destroy_chunk(chunk: &Chunk) {
let fill = chunk.fill.get();
while idx < fill {
-let tydesc_data: *const uint = mem::transmute(buf.offset(idx as int));
+let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
let (size, align) = ((*tydesc).size, (*tydesc).align);
@@ -160,7 +159,7 @@ unsafe fn destroy_chunk(chunk: &Chunk) {
//debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
// start, size, align, is_done);
if is_done {
-((*tydesc).drop_glue)(buf.offset(start as int) as *const i8);
+((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
}
// Find where the next tydesc lives
@@ -173,21 +172,21 @@ unsafe fn destroy_chunk(chunk: &Chunk) {
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
-fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> uint {
-p as uint | (is_done as uint)
+fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
+p as usize | (is_done as usize)
}
#[inline]
-fn un_bitpack_tydesc_ptr(p: uint) -> (*const TyDesc, bool) {
+fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
((p & !1) as *const TyDesc, p & 1 == 1)
}
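
The packing works because `TyDesc` values are more than byte-aligned, so bit 0 of the pointer is always zero and free to carry the `is_done` flag. A small self-contained demonstration of the same tagging scheme on an ordinary `u32` pointer:

    fn pack(p: *const u32, flag: bool) -> usize {
        debug_assert!(p as usize & 1 == 0); // alignment keeps bit 0 free
        p as usize | (flag as usize)
    }

    fn unpack(tagged: usize) -> (*const u32, bool) {
        ((tagged & !1) as *const u32, tagged & 1 == 1)
    }

    fn main() {
        let x = 42u32;
        let tagged = pack(&x, true);
        let (p, flag) = unpack(tagged);
        assert!(flag);
        assert_eq!(unsafe { *p }, 42);
    }
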
impl Arena {
-fn chunk_size(&self) -> uint {
+fn chunk_size(&self) -> usize {
self.copy_head.borrow().capacity()
}
// Functions for the POD part of the arena
-fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *const u8 {
+fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
// Allocate a new chunk.
let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
@@ -199,7 +198,7 @@ impl Arena {
}
#[inline]
-fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *const u8 {
+fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
let start = round_up(self.copy_head.borrow().fill.get(), align);
let end = start + n_bytes;
@@ -211,7 +210,7 @@ impl Arena {
copy_head.fill.set(end);
unsafe {
-copy_head.as_ptr().offset(start as int)
+copy_head.as_ptr().offset(start as isize)
}
}
@@ -227,8 +226,8 @@ impl Arena {
}
// Functions for the non-POD part of the arena
-fn alloc_noncopy_grow(&self, n_bytes: uint,
-align: uint) -> (*const u8, *const u8) {
+fn alloc_noncopy_grow(&self, n_bytes: usize,
+align: usize) -> (*const u8, *const u8) {
// Allocate a new chunk.
let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
self.chunks.borrow_mut().push(self.head.borrow().clone());
@@ -240,8 +239,8 @@ impl Arena {
}
#[inline]
-fn alloc_noncopy_inner(&self, n_bytes: uint,
-align: uint) -> (*const u8, *const u8) {
+fn alloc_noncopy_inner(&self, n_bytes: usize,
+align: usize) -> (*const u8, *const u8) {
// Be careful to not maintain any `head` borrows active, because
// `alloc_noncopy_grow` borrows it mutably.
let (start, end, tydesc_start, head_capacity) = {
@ -265,7 +264,7 @@ impl Arena {
unsafe {
let buf = head.as_ptr();
-return (buf.offset(tydesc_start as int), buf.offset(start as int));
+return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
}
}
@@ -276,7 +275,7 @@ impl Arena {
let (ty_ptr, ptr) =
self.alloc_noncopy_inner(mem::size_of::<T>(),
mem::min_align_of::<T>());
-let ty_ptr = ty_ptr as *mut uint;
+let ty_ptr = ty_ptr as *mut usize;
let ptr = ptr as *mut T;
// Write in our tydesc along with a bit indicating that it
// has *not* been initialized yet.
@@ -320,7 +319,7 @@ fn test_arena_destructors() {
#[test]
fn test_arena_alloc_nested() {
-struct Inner { value: uint }
+struct Inner { value: usize }
struct Outer<'a> { inner: &'a Inner }
let arena = Arena::new();
@@ -343,10 +342,10 @@ fn test_arena_destructors_fail() {
arena.alloc(|| { Rc::new(i) });
// Allocate something with funny size and alignment, to keep
// things interesting.
-arena.alloc(|| { [0u8, 1u8, 2u8] });
+arena.alloc(|| { [0u8, 1, 2] });
}
// Now, panic while allocating
-arena.alloc::<Rc<int>, _>(|| {
+arena.alloc::<Rc<i32>, _>(|| {
panic!();
});
}
@@ -373,12 +372,12 @@ struct TypedArenaChunk<T> {
next: *mut TypedArenaChunk<T>,
/// The number of elements that this chunk can hold.
-capacity: uint,
+capacity: usize,
// Objects follow here, suitably aligned.
}
-fn calculate_size<T>(capacity: uint) -> uint {
+fn calculate_size<T>(capacity: usize) -> usize {
let mut size = mem::size_of::<TypedArenaChunk<T>>();
size = round_up(size, mem::min_align_of::<T>());
let elem_size = mem::size_of::<T>();
@@ -389,7 +388,7 @@ fn calculate_size<T>(capacity: uint) -> uint {
impl<T> TypedArenaChunk<T> {
#[inline]
-unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: uint)
+unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
-> *mut TypedArenaChunk<T> {
let size = calculate_size::<T>(capacity);
let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
@@ -403,13 +402,13 @@ impl<T> TypedArenaChunk<T> {
/// Destroys this arena chunk. If the type descriptor is supplied, the
/// drop glue is called; otherwise, drop glue is not called.
#[inline]
-unsafe fn destroy(&mut self, len: uint) {
+unsafe fn destroy(&mut self, len: usize) {
// Destroy all the allocated objects.
if intrinsics::needs_drop::<T>() {
let mut start = self.start();
for _ in 0..len {
ptr::read(start as *const T); // run the destructor on the pointer
-start = start.offset(mem::size_of::<T>() as int)
+start = start.offset(mem::size_of::<T>() as isize)
}
}
@@ -429,7 +428,7 @@ impl<T> TypedArenaChunk<T> {
fn start(&self) -> *const u8 {
let this: *const TypedArenaChunk<T> = self;
unsafe {
-mem::transmute(round_up(this.offset(1) as uint,
+mem::transmute(round_up(this.offset(1) as usize,
mem::min_align_of::<T>()))
}
}
@@ -439,7 +438,7 @@ impl<T> TypedArenaChunk<T> {
fn end(&self) -> *const u8 {
unsafe {
let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
-self.start().offset(size as int)
+self.start().offset(size as isize)
}
}
}
@@ -454,7 +453,7 @@ impl<T> TypedArena<T> {
/// Creates a new `TypedArena` with preallocated space for the given number of
/// objects.
#[inline]
-pub fn with_capacity(capacity: uint) -> TypedArena<T> {
+pub fn with_capacity(capacity: usize) -> TypedArena<T> {
unsafe {
let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
TypedArena {
@@ -501,8 +500,8 @@ impl<T> Drop for TypedArena<T> {
fn drop(&mut self) {
unsafe {
// Determine how much was filled.
-let start = self.first.borrow().as_ref().unwrap().start() as uint;
-let end = self.ptr.get() as uint;
+let start = self.first.borrow().as_ref().unwrap().start() as usize;
+let end = self.ptr.get() as usize;
let diff = (end - start) / mem::size_of::<T>();
// Pass that to the `destroy` method.
@@ -519,9 +518,9 @@ mod tests {
#[allow(dead_code)]
struct Point {
-x: int,
-y: int,
-z: int,
+x: i32,
+y: i32,
+z: i32,
}
#[test]
@@ -576,7 +575,7 @@ mod tests {
#[allow(dead_code)]
struct Noncopy {
string: String,
-array: Vec<int>,
+array: Vec<i32>,
}
#[test]

(changed file 7 of 7)

@@ -15,7 +15,6 @@
#![cfg_attr(not(feature = "cargo-build"), feature(staged_api))]
#![cfg_attr(not(feature = "cargo-build"), staged_api)]
#![cfg_attr(not(feature = "cargo-build"), feature(core))]
-#![feature(int_uint)]
#![feature(no_std)]
#![no_std]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
@@ -1905,7 +1904,7 @@ pub mod types {
#[repr(C)]
#[derive(Copy)] pub struct WSAPROTOCOLCHAIN {
pub ChainLen: c_int,
-pub ChainEntries: [DWORD; MAX_PROTOCOL_CHAIN as uint],
+pub ChainEntries: [DWORD; MAX_PROTOCOL_CHAIN as usize],
}
pub type LPWSAPROTOCOLCHAIN = *mut WSAPROTOCOLCHAIN;
@@ -1931,7 +1930,7 @@ pub mod types {
pub iSecurityScheme: c_int,
pub dwMessageSize: DWORD,
pub dwProviderReserved: DWORD,
-pub szProtocol: [u8; (WSAPROTOCOL_LEN as uint) + 1us],
+pub szProtocol: [u8; WSAPROTOCOL_LEN as usize + 1],
}
pub type LPWSAPROTOCOL_INFO = *mut WSAPROTOCOL_INFO;
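
The last two hunks are needed because array lengths in types are `usize` expressions: after the rename, a constant used in a length position must be cast with `as usize`, and the old `1us` literal suffix can simply be dropped. A tiny sketch with a hypothetical stand-in for the libc constant:

    const WSAPROTOCOL_LEN: u32 = 255; // hypothetical value for illustration

    struct WsaProtocolInfo {
        sz_protocol: [u8; WSAPROTOCOL_LEN as usize + 1], // length must be usize
    }

    fn main() {
        let info = WsaProtocolInfo {
            sz_protocol: [0; WSAPROTOCOL_LEN as usize + 1],
        };
        assert_eq!(info.sz_protocol.len(), 256);
    }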