Auto merge of #28610 - nrc:fmt6, r=brson

bors 2015-09-25 19:06:02 +00:00
commit e7a73881e9
7 changed files with 258 additions and 106 deletions

src/liballoc/arc.rs

@@ -214,7 +214,9 @@ impl<T> Arc<T> {
#[stable(feature = "arc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
// See `drop` for why all these atomics are like this
if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { return Err(this) }
if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
return Err(this)
}
atomic::fence(Acquire);
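
The hunk above only reflows `Arc::try_unwrap`; its behavior is unchanged: the CAS succeeds only when the caller holds the sole strong reference. A minimal usage sketch against the stable API (`try_unwrap` is stable since 1.4.0, per the attribute above):

use std::sync::Arc;

fn main() {
    let unique = Arc::new(3);
    // Sole strong reference: unwrapping succeeds and yields the value.
    assert_eq!(Arc::try_unwrap(unique), Ok(3));

    let shared = Arc::new(4);
    let _second = shared.clone();
    // A second strong reference exists, so the Arc is handed back in Err.
    assert_eq!(*Arc::try_unwrap(shared).unwrap_err(), 4);
}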
@@ -251,7 +253,9 @@ impl<T: ?Sized> Arc<T> {
let cur = this.inner().weak.load(Relaxed);
// check if the weak counter is currently "locked"; if so, spin.
if cur == usize::MAX { continue }
if cur == usize::MAX {
continue
}
// NOTE: this code currently ignores the possibility of overflow
// into usize::MAX; in general both Rc and Arc need to be adjusted
@@ -303,7 +307,9 @@ impl<T: ?Sized> Arc<T> {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
deallocate(ptr as *mut u8,
size_of_val(&*ptr),
align_of_val(&*ptr))
}
}
}
@@ -348,7 +354,9 @@ impl<T: ?Sized> Clone for Arc<T> {
// We abort because such a program is incredibly degenerate, and we
// don't care to support it.
if old_size > MAX_REFCOUNT {
unsafe { abort(); }
unsafe {
abort();
}
}
Arc { _ptr: self._ptr }
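
For context on the `MAX_REFCOUNT` abort reflowed above: safe code can call `mem::forget` on a clone, skipping the `Drop` that would decrement the strong count, so the count can be driven upward indefinitely. A hedged sketch of that degenerate pattern (`leak_clones_forever` is an illustrative name, not from the diff; it loops by design and is shown, not run):

use std::mem;
use std::sync::Arc;

fn leak_clones_forever(a: &Arc<u32>) {
    loop {
        // Each clone bumps the strong count; `forget` suppresses the
        // matching decrement, so the count only ever grows.
        mem::forget(a.clone());
    }
}

fn main() {
    let a = Arc::new(0_u32);
    // Calling `leak_clones_forever(&a)` would eventually trip the
    // MAX_REFCOUNT check above and abort; we deliberately don't call it.
    let _ = &a;
}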
@@ -556,7 +564,9 @@ impl<T: ?Sized> Drop for Arc<T> {
// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
// same logic applies to the below `fetch_sub` to the `weak` count.
if self.inner().strong.fetch_sub(1, Release) != 1 { return }
if self.inner().strong.fetch_sub(1, Release) != 1 {
return
}
// This fence is needed to prevent reordering of use of the data and
// deletion of the data. Because it is marked `Release`, the decreasing
@@ -578,7 +588,7 @@ impl<T: ?Sized> Drop for Arc<T> {
atomic::fence(Acquire);
unsafe {
self.drop_slow()
self.drop_slow();
}
}
}
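
The `Drop` hunks above follow the standard refcount teardown protocol the comments describe: decrement with `Release` so prior uses of the data cannot be reordered past the decrement, and have the thread that observes zero issue an `Acquire` fence before freeing. A standalone sketch of the same pairing (the `Counted`/`release` names are mine, not from the diff):

use std::sync::atomic::{fence, AtomicUsize, Ordering};

struct Counted {
    refs: AtomicUsize,
}

// Returns true when the caller dropped the last reference and may free.
fn release(c: &Counted) -> bool {
    // `Release` publishes every prior access of the shared data.
    if c.refs.fetch_sub(1, Ordering::Release) != 1 {
        return false;
    }
    // Pairs with the `Release` decrements performed by the other handles.
    fence(Ordering::Acquire);
    true
}

fn main() {
    let c = Counted { refs: AtomicUsize::new(2) };
    assert!(!release(&c)); // one handle remains
    assert!(release(&c)); // last handle: now safe to free
}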
@@ -613,11 +623,15 @@ impl<T: ?Sized> Weak<T> {
// "stale" read of 0 is fine), and any other value is
// confirmed via the CAS below.
let n = inner.strong.load(Relaxed);
if n == 0 { return None }
if n == 0 {
return None
}
// Relaxed is valid for the same reason it is on Arc's Clone impl
let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
if old == n { return Some(Arc { _ptr: self._ptr }) }
if old == n {
return Some(Arc { _ptr: self._ptr })
}
}
}
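
`Weak::upgrade`, reflowed above, loops on a CAS so the strong count can never be resurrected from zero. From the caller's side it looks like this (a sketch using the stable `Arc`/`Weak` API):

use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new(5);
    let weak: Weak<i32> = Arc::downgrade(&strong);

    // While a strong handle is alive, upgrade yields a new Arc.
    assert!(weak.upgrade().is_some());

    drop(strong);
    // The strong count reached zero: the value is gone, upgrade gives None.
    assert!(weak.upgrade().is_none());
}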
@@ -653,7 +667,9 @@ impl<T: ?Sized> Clone for Weak<T> {
// See comments in Arc::clone() for why we do this (for mem::forget).
if old_size > MAX_REFCOUNT {
unsafe { abort(); }
unsafe {
abort();
}
}
return Weak { _ptr: self._ptr }
@@ -705,9 +721,11 @@ impl<T: ?Sized> Drop for Weak<T> {
// ref, which can only happen after the lock is released.
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8,
size_of_val(&*ptr),
align_of_val(&*ptr)) }
unsafe {
deallocate(ptr as *mut u8,
size_of_val(&*ptr),
align_of_val(&*ptr))
}
}
}
}
@@ -727,7 +745,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
///
/// five == Arc::new(5);
/// ```
fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
fn eq(&self, other: &Arc<T>) -> bool {
*(*self) == *(*other)
}
/// Inequality for two `Arc<T>`s.
///
@@ -742,7 +762,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
///
/// five != Arc::new(5);
/// ```
fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
fn ne(&self, other: &Arc<T>) -> bool {
*(*self) != *(*other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
@@ -776,7 +798,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five < Arc::new(5);
/// ```
fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
fn lt(&self, other: &Arc<T>) -> bool {
*(*self) < *(*other)
}
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
@@ -791,7 +815,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five <= Arc::new(5);
/// ```
fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
fn le(&self, other: &Arc<T>) -> bool {
*(*self) <= *(*other)
}
/// Greater-than comparison for two `Arc<T>`s.
///
@@ -806,7 +832,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five > Arc::new(5);
/// ```
fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
fn gt(&self, other: &Arc<T>) -> bool {
*(*self) > *(*other)
}
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
@@ -821,11 +849,15 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five >= Arc::new(5);
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
fn ge(&self, other: &Arc<T>) -> bool {
*(*self) >= *(*other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
fn cmp(&self, other: &Arc<T>) -> Ordering {
(**self).cmp(&**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> Eq for Arc<T> {}
@@ -854,7 +886,9 @@ impl<T> fmt::Pointer for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> Arc<T> { Arc::new(Default::default()) }
fn default() -> Arc<T> {
Arc::new(Default::default())
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -1015,7 +1049,7 @@ mod tests {
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: Mutex<Option<Weak<Cycle>>>
x: Mutex<Option<Weak<Cycle>>>,
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
@@ -1095,7 +1129,9 @@ mod tests {
// Make sure deriving works with Arc<T>
#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
struct Foo { inner: Arc<i32> }
struct Foo {
inner: Arc<i32>,
}
#[test]
fn test_unsized() {
@@ -1108,5 +1144,7 @@ mod tests {
}
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
fn borrow(&self) -> &T { &**self }
fn borrow(&self) -> &T {
&**self
}
}

src/liballoc/boxed.rs

@@ -66,7 +66,7 @@ use core::mem;
use core::ops::{CoerceUnsized, Deref, DerefMut};
use core::ops::{Placer, Boxed, Place, InPlace, BoxPlace};
use core::ptr::{self, Unique};
use core::raw::{TraitObject};
use core::raw::TraitObject;
/// A value that represents the heap. This is the default place that the `box`
/// keyword allocates into when no place is supplied.
@@ -95,7 +95,9 @@ pub const HEAP: ExchangeHeapSingleton =
reason = "may be renamed; uncertain about custom allocator design",
issue = "27779")]
#[derive(Copy, Clone)]
pub struct ExchangeHeapSingleton { _force_singleton: () }
pub struct ExchangeHeapSingleton {
_force_singleton: (),
}
/// A pointer type for heap allocation.
///
@@ -126,7 +128,7 @@ pub struct Box<T: ?Sized>(Unique<T>);
#[unstable(feature = "placement_in",
reason = "placement box design is still being worked out.",
issue = "27779")]
pub struct IntermediateBox<T: ?Sized>{
pub struct IntermediateBox<T: ?Sized> {
ptr: *mut u8,
size: usize,
align: usize,
@@ -152,9 +154,7 @@ fn make_place<T>() -> IntermediateBox<T> {
let p = if size == 0 {
heap::EMPTY as *mut u8
} else {
let p = unsafe {
heap::allocate(size, align)
};
let p = unsafe { heap::allocate(size, align) };
if p.is_null() {
panic!("Box make_place allocation failure.");
}
@@ -165,18 +165,24 @@ fn make_place<T>() -> IntermediateBox<T> {
}
impl<T> BoxPlace<T> for IntermediateBox<T> {
fn make_place() -> IntermediateBox<T> { make_place() }
fn make_place() -> IntermediateBox<T> {
make_place()
}
}
impl<T> InPlace<T> for IntermediateBox<T> {
type Owner = Box<T>;
unsafe fn finalize(self) -> Box<T> { finalize(self) }
unsafe fn finalize(self) -> Box<T> {
finalize(self)
}
}
impl<T> Boxed for Box<T> {
type Data = T;
type Place = IntermediateBox<T>;
unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> { finalize(b) }
unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> {
finalize(b)
}
}
impl<T> Placer<T> for ExchangeHeapSingleton {
@@ -190,9 +196,7 @@ impl<T> Placer<T> for ExchangeHeapSingleton {
impl<T: ?Sized> Drop for IntermediateBox<T> {
fn drop(&mut self) {
if self.size > 0 {
unsafe {
heap::deallocate(self.ptr, self.size, self.align)
}
unsafe { heap::deallocate(self.ptr, self.size, self.align) }
}
}
}
@@ -256,13 +260,17 @@ impl<T : ?Sized> Box<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Box<T> {
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> Box<T> { box Default::default() }
fn default() -> Box<T> {
box Default::default()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for Box<[T]> {
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> Box<[T]> { Box::<[T; 0]>::new([]) }
fn default() -> Box<[T]> {
Box::<[T; 0]>::new([])
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -275,8 +283,11 @@ impl<T: Clone> Clone for Box<T> {
/// let x = Box::new(5);
/// let y = x.clone();
/// ```
#[rustfmt_skip]
#[inline]
fn clone(&self) -> Box<T> { box {(**self).clone()} }
fn clone(&self) -> Box<T> {
box { (**self).clone() }
}
/// Copies `source`'s contents into `self` without creating a new allocation.
///
/// # Examples
@@ -311,9 +322,13 @@ impl Clone for Box<str> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
#[inline]
fn eq(&self, other: &Box<T>) -> bool { PartialEq::eq(&**self, &**other) }
fn eq(&self, other: &Box<T>) -> bool {
PartialEq::eq(&**self, &**other)
}
#[inline]
fn ne(&self, other: &Box<T>) -> bool { PartialEq::ne(&**self, &**other) }
fn ne(&self, other: &Box<T>) -> bool {
PartialEq::ne(&**self, &**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
@@ -322,13 +337,21 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
PartialOrd::partial_cmp(&**self, &**other)
}
#[inline]
fn lt(&self, other: &Box<T>) -> bool { PartialOrd::lt(&**self, &**other) }
fn lt(&self, other: &Box<T>) -> bool {
PartialOrd::lt(&**self, &**other)
}
#[inline]
fn le(&self, other: &Box<T>) -> bool { PartialOrd::le(&**self, &**other) }
fn le(&self, other: &Box<T>) -> bool {
PartialOrd::le(&**self, &**other)
}
#[inline]
fn ge(&self, other: &Box<T>) -> bool { PartialOrd::ge(&**self, &**other) }
fn ge(&self, other: &Box<T>) -> bool {
PartialOrd::ge(&**self, &**other)
}
#[inline]
fn gt(&self, other: &Box<T>) -> bool { PartialOrd::gt(&**self, &**other) }
fn gt(&self, other: &Box<T>) -> bool {
PartialOrd::gt(&**self, &**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Box<T> {
@@ -356,8 +379,7 @@ impl Box<Any> {
unsafe {
// Get the raw representation of the trait object
let raw = Box::into_raw(self);
let to: TraitObject =
mem::transmute::<*mut Any, TraitObject>(raw);
let to: TraitObject = mem::transmute::<*mut Any, TraitObject>(raw);
// Extract the data pointer
Ok(Box::from_raw(to.data as *mut T))
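
The `Box<Any>::downcast` hunk above recovers the concrete type through `TraitObject`'s data pointer. A usage sketch in today's spelling (`dyn Any`; the diff predates the `dyn` keyword):

use std::any::Any;

fn main() {
    let boxed: Box<dyn Any> = Box::new(5_i32);
    // `downcast` consumes the box; on a type mismatch it returns the
    // original box in Err, so nothing is lost.
    match boxed.downcast::<i32>() {
        Ok(n) => assert_eq!(*n, 5),
        Err(_) => panic!("not an i32"),
    }
}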
@@ -408,23 +430,33 @@ impl<T> fmt::Pointer for Box<T> {
impl<T: ?Sized> Deref for Box<T> {
type Target = T;
fn deref(&self) -> &T { &**self }
fn deref(&self) -> &T {
&**self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> DerefMut for Box<T> {
fn deref_mut(&mut self) -> &mut T { &mut **self }
fn deref_mut(&mut self) -> &mut T {
&mut **self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + ?Sized> Iterator for Box<I> {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> { (**self).next() }
fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
fn next(&mut self) -> Option<I::Item> {
(**self).next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
(**self).size_hint()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
fn next_back(&mut self) -> Option<I::Item> { (**self).next_back() }
fn next_back(&mut self) -> Option<I::Item> {
(**self).next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {}
@@ -506,10 +538,7 @@ impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl<T: Clone> Clone for Box<[T]> {
fn clone(&self) -> Self {
let mut new = BoxBuilder {
data: RawVec::with_capacity(self.len()),
len: 0
};
let mut new = BoxBuilder { data: RawVec::with_capacity(self.len()), len: 0 };
let mut target = new.data.ptr();
@@ -555,9 +584,13 @@ impl<T: Clone> Clone for Box<[T]> {
}
impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
fn borrow(&self) -> &T { &**self }
fn borrow(&self) -> &T {
&**self
}
}
impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
fn borrow_mut(&mut self) -> &mut T { &mut **self }
fn borrow_mut(&mut self) -> &mut T {
&mut **self
}
}

src/liballoc/boxed_test.rs

@@ -34,12 +34,16 @@ fn any_move() {
let b = Box::new(Test) as Box<Any>;
match a.downcast::<i32>() {
Ok(a) => { assert!(a == Box::new(8)); }
Err(..) => panic!()
Ok(a) => {
assert!(a == Box::new(8));
}
Err(..) => panic!(),
}
match b.downcast::<Test>() {
Ok(a) => { assert!(a == Box::new(Test)); }
Err(..) => panic!()
Ok(a) => {
assert!(a == Box::new(Test));
}
Err(..) => panic!(),
}
let a = Box::new(8) as Box<Any>;
@@ -70,7 +74,8 @@ fn test_show() {
#[test]
fn deref() {
fn homura<T: Deref<Target=i32>>(_: T) { }
fn homura<T: Deref<Target = i32>>(_: T) {
}
homura(Box::new(765));
}

src/liballoc/heap.rs

@@ -22,18 +22,24 @@ extern {
#[allocator]
fn __rust_allocate(size: usize, align: usize) -> *mut u8;
fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize,
align: usize) -> *mut u8;
fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
align: usize) -> usize;
fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
fn __rust_reallocate_inplace(ptr: *mut u8,
old_size: usize,
size: usize,
align: usize)
-> usize;
fn __rust_usable_size(size: usize, align: usize) -> usize;
}
#[inline(always)]
fn check_size_and_alignment(size: usize, align: usize) {
debug_assert!(size != 0);
debug_assert!(size <= isize::MAX as usize, "Tried to allocate too much: {} bytes", size);
debug_assert!(usize::is_power_of_two(align), "Invalid alignment of allocation: {}", align);
debug_assert!(size <= isize::MAX as usize,
"Tried to allocate too much: {} bytes",
size);
debug_assert!(usize::is_power_of_two(align),
"Invalid alignment of allocation: {}",
align);
}
// FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`
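
`check_size_and_alignment` gains multi-line `debug_assert!` calls above; the preconditions themselves are unchanged. A standalone restatement of those checks (illustrative, since the real function is private to liballoc, and written with plain `assert!` so it also fires in release builds):

fn check_size_and_alignment(size: usize, align: usize) {
    // A zero-byte request is handled by the EMPTY sentinel, never here.
    assert!(size != 0);
    assert!(size <= isize::MAX as usize,
            "Tried to allocate too much: {} bytes", size);
    assert!(align.is_power_of_two(),
            "Invalid alignment of allocation: {}", align);
}

fn main() {
    check_size_and_alignment(4096, 8); // fine
    // check_size_and_alignment(4096, 12); // would panic: not a power of two
}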
@@ -84,8 +90,11 @@ pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usiz
/// create the allocation referenced by `ptr`. The `old_size` parameter may be
/// any value in range_inclusive(requested_size, usable_size).
#[inline]
pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
align: usize) -> usize {
pub unsafe fn reallocate_inplace(ptr: *mut u8,
old_size: usize,
size: usize,
align: usize)
-> usize {
check_size_and_alignment(size, align);
__rust_reallocate_inplace(ptr, old_size, size, align)
}
@@ -124,7 +133,9 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
EMPTY as *mut u8
} else {
let ptr = allocate(size, align);
if ptr.is_null() { ::oom() }
if ptr.is_null() {
::oom()
}
ptr
}
}
@@ -148,7 +159,9 @@ mod tests {
unsafe {
let size = 4000;
let ptr = heap::allocate(size, 8);
if ptr.is_null() { ::oom() }
if ptr.is_null() {
::oom()
}
let ret = heap::reallocate_inplace(ptr, size, size, 8);
heap::deallocate(ptr, size, 8);
assert_eq!(ret, heap::usable_size(size, 8));

src/liballoc/lib.rs

@@ -107,8 +107,12 @@ extern crate alloc_system;
// Allow testing this library
#[cfg(test)] #[macro_use] extern crate std;
#[cfg(test)] #[macro_use] extern crate log;
#[cfg(test)]
#[macro_use]
extern crate std;
#[cfg(test)]
#[macro_use]
extern crate log;
// Heaps provided for low-level allocation strategies
@@ -123,7 +127,9 @@ pub mod heap;
#[cfg(not(test))]
pub mod boxed;
#[cfg(test)]
mod boxed { pub use std::boxed::{Box, HEAP}; }
mod boxed {
pub use std::boxed::{Box, HEAP};
}
#[cfg(test)]
mod boxed_test;
pub mod arc;

src/liballoc/raw_vec.rs

@@ -58,7 +58,11 @@ impl<T> RawVec<T> {
pub fn new() -> Self {
unsafe {
// !0 is usize::MAX. This branch should be stripped at compile time.
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
let cap = if mem::size_of::<T>() == 0 {
!0
} else {
0
};
// heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
@@ -92,7 +96,9 @@ impl<T> RawVec<T> {
} else {
let align = mem::align_of::<T>();
let ptr = heap::allocate(alloc_size, align);
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
ptr
};
@@ -133,7 +139,11 @@ impl<T> RawVec<T> {
///
/// This will always be `usize::MAX` if `T` is zero-sized.
pub fn cap(&self) -> usize {
if mem::size_of::<T>() == 0 { !0 } else { self.cap }
if mem::size_of::<T>() == 0 {
!0
} else {
self.cap
}
}
/// Doubles the size of the type's backing allocation. This is common enough
@@ -190,7 +200,11 @@ impl<T> RawVec<T> {
let (new_cap, ptr) = if self.cap == 0 {
// skip to 4 because tiny Vec's are dumb; but not if that would cause overflow
let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
let new_cap = if elem_size > (!0) / 8 {
1
} else {
4
};
let ptr = heap::allocate(new_cap * elem_size, align);
(new_cap, ptr)
} else {
@@ -207,7 +221,9 @@ impl<T> RawVec<T> {
};
// If allocate or reallocate fail, we'll get `null` back
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
@@ -246,7 +262,9 @@ impl<T> RawVec<T> {
// Don't actually need any more capacity.
// Wrapping in case they gave a bad `used_cap`.
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
return;
}
// Nothing we can really do about these checks :(
let new_cap = used_cap.checked_add(needed_extra_cap).expect("capacity overflow");
@@ -263,7 +281,9 @@ impl<T> RawVec<T> {
};
// If allocate or reallocate fail, we'll get `null` back
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
@@ -326,7 +346,9 @@ impl<T> RawVec<T> {
// Don't actually need any more capacity.
// Wrapping in case they give a bad `used_cap`
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
return;
}
// Nothing we can really do about these checks :(
let new_cap = used_cap.checked_add(needed_extra_cap)
@@ -346,7 +368,9 @@ impl<T> RawVec<T> {
};
// If allocate or reallocate fail, we'll get `null` back
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
@@ -374,7 +398,8 @@ impl<T> RawVec<T> {
}
// This check is my waterloo; it's the only thing Vec wouldn't have to do.
assert!(self.cap >= amount, "Tried to shrink to a larger capacity");
assert!(self.cap >= amount,
"Tried to shrink to a larger capacity");
if amount == 0 {
mem::replace(self, RawVec::new());
@@ -386,7 +411,9 @@ impl<T> RawVec<T> {
self.cap * elem_size,
amount * elem_size,
align);
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
}
self.cap = amount;
@@ -446,6 +473,7 @@ impl<T> Drop for RawVec<T> {
#[inline]
fn alloc_guard(alloc_size: usize) {
if core::usize::BITS < 64 {
assert!(alloc_size <= ::core::isize::MAX as usize, "capacity overflow");
assert!(alloc_size <= ::core::isize::MAX as usize,
"capacity overflow");
}
}
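
The `RawVec` hunks above are reflows of its growth logic: zero-sized types report `usize::MAX` capacity and never allocate, growth skips to 4 elements and then doubles, and `alloc_guard` rejects byte sizes above `isize::MAX`. A condensed sketch of that arithmetic (the `grow_cap` name and shape are mine; the real code also performs the reallocation, and only applies the guard on sub-64-bit targets):

fn grow_cap(cap: usize, elem_size: usize) -> usize {
    if elem_size == 0 {
        // Zero-sized types never allocate, so capacity is effectively infinite.
        return usize::MAX;
    }
    // Skip to 4 because tiny capacities cause churn; afterwards, double.
    let new_cap = if cap == 0 { 4 } else { cap * 2 };
    // Mirror of `alloc_guard`: keep the byte size addressable via isize.
    let bytes = new_cap.checked_mul(elem_size).expect("capacity overflow");
    assert!(bytes <= isize::MAX as usize, "capacity overflow");
    new_cap
}

fn main() {
    assert_eq!(grow_cap(0, 4), 4);
    assert_eq!(grow_cap(4, 4), 8);
    assert_eq!(grow_cap(0, 0), usize::MAX);
}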

src/liballoc/rc.rs

@@ -213,7 +213,7 @@ impl<T> Rc<T> {
_ptr: NonZero::new(Box::into_raw(box RcBox {
strong: Cell::new(1),
weak: Cell::new(1),
value: value
value: value,
})),
}
}
@@ -290,13 +290,17 @@ impl<T: ?Sized> Rc<T> {
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
pub fn weak_count(this: &Self) -> usize { this.weak() - 1 }
pub fn weak_count(this: &Self) -> usize {
this.weak() - 1
}
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
pub fn strong_count(this: &Self) -> usize { this.strong() }
pub fn strong_count(this: &Self) -> usize {
this.strong()
}
/// Returns true if there are no other `Rc` or `Weak<T>` values that share
/// the same inner value.
@@ -451,7 +455,7 @@ impl<T: ?Sized> Drop for Rc<T> {
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
@@ -530,7 +534,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// five == Rc::new(5);
/// ```
#[inline(always)]
fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
fn eq(&self, other: &Rc<T>) -> bool {
**self == **other
}
/// Inequality for two `Rc<T>`s.
///
@@ -546,7 +552,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// five != Rc::new(5);
/// ```
#[inline(always)]
fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
fn ne(&self, other: &Rc<T>) -> bool {
**self != **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -586,7 +594,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five < Rc::new(5);
/// ```
#[inline(always)]
fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
fn lt(&self, other: &Rc<T>) -> bool {
**self < **other
}
/// 'Less-than or equal to' comparison for two `Rc<T>`s.
///
@@ -602,7 +612,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five <= Rc::new(5);
/// ```
#[inline(always)]
fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
fn le(&self, other: &Rc<T>) -> bool {
**self <= **other
}
/// Greater-than comparison for two `Rc<T>`s.
///
@@ -618,7 +630,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five > Rc::new(5);
/// ```
#[inline(always)]
fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
fn gt(&self, other: &Rc<T>) -> bool {
**self > **other
}
/// 'Greater-than or equal to' comparison for two `Rc<T>`s.
///
@@ -634,7 +648,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five >= Rc::new(5);
/// ```
#[inline(always)]
fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
fn ge(&self, other: &Rc<T>) -> bool {
**self >= **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -653,7 +669,9 @@ impl<T: ?Sized + Ord> Ord for Rc<T> {
/// five.partial_cmp(&Rc::new(5));
/// ```
#[inline]
fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
fn cmp(&self, other: &Rc<T>) -> Ordering {
(**self).cmp(&**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -764,12 +782,13 @@ impl<T: ?Sized> Drop for Weak<T> {
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr),
deallocate(ptr as *mut u8,
size_of_val(&*ptr),
align_of_val(&*ptr))
}
}
@@ -821,7 +840,9 @@ trait RcBoxPtr<T: ?Sized> {
fn inner(&self) -> &RcBox<T>;
#[inline]
fn strong(&self) -> usize { self.inner().strong.get() }
fn strong(&self) -> usize {
self.inner().strong.get()
}
#[inline]
fn inc_strong(&self) {
@@ -829,10 +850,14 @@ trait RcBoxPtr<T: ?Sized> {
}
#[inline]
fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
fn dec_strong(&self) {
self.inner().strong.set(self.strong() - 1);
}
#[inline]
fn weak(&self) -> usize { self.inner().weak.get() }
fn weak(&self) -> usize {
self.inner().weak.get()
}
#[inline]
fn inc_weak(&self) {
@@ -840,7 +865,9 @@ trait RcBoxPtr<T: ?Sized> {
}
#[inline]
fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
fn dec_weak(&self) {
self.inner().weak.set(self.weak() - 1);
}
}
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
@@ -928,7 +955,7 @@ mod tests {
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: RefCell<Option<Weak<Cycle>>>
x: RefCell<Option<Weak<Cycle>>>,
}
let a = Rc::new(Cycle { x: RefCell::new(None) });
@@ -1086,5 +1113,7 @@ mod tests {
}
impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
fn borrow(&self) -> &T { &**self }
fn borrow(&self) -> &T {
&**self
}
}
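
The `weak_count` and `strong_count` accessors reflowed above (unstable `rc_counts` at the time, later stabilized) expose the two counters; note that `weak_count` subtracts the implicit weak reference collectively held by the strong handles. A sketch against today's stable API:

use std::rc::Rc;

fn main() {
    let a = Rc::new(5);
    let _b = a.clone();
    let _w = Rc::downgrade(&a);

    // Two strong handles: `a` and `_b`.
    assert_eq!(Rc::strong_count(&a), 2);
    // Only `_w` is reported; the implicit weak is subtracted, matching
    // the `this.weak() - 1` in the diff.
    assert_eq!(Rc::weak_count(&a), 1);
}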