diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 78821403de0..ceca44fc1ac 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -214,7 +214,9 @@ impl<T> Arc<T> {
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
         // See `drop` for why all these atomics are like this
-        if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { return Err(this) }
+        if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
+            return Err(this)
+        }

         atomic::fence(Acquire);

@@ -251,7 +253,9 @@ impl<T: ?Sized> Arc<T> {
             let cur = this.inner().weak.load(Relaxed);

             // check if the weak counter is currently "locked"; if so, spin.
-            if cur == usize::MAX { continue }
+            if cur == usize::MAX {
+                continue
+            }

             // NOTE: this code currently ignores the possibility of overflow
             // into usize::MAX; in general both Rc and Arc need to be adjusted
@@ -303,7 +307,9 @@ impl<T: ?Sized> Arc<T> {

         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
+            deallocate(ptr as *mut u8,
+                       size_of_val(&*ptr),
+                       align_of_val(&*ptr))
         }
     }
 }
@@ -348,7 +354,9 @@ impl<T: ?Sized> Clone for Arc<T> {
         // We abort because such a program is incredibly degenerate, and we
         // don't care to support it.
         if old_size > MAX_REFCOUNT {
-            unsafe { abort(); }
+            unsafe {
+                abort();
+            }
         }

         Arc { _ptr: self._ptr }
@@ -556,7 +564,9 @@ impl<T: ?Sized> Drop for Arc<T> {
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
-        if self.inner().strong.fetch_sub(1, Release) != 1 { return }
+        if self.inner().strong.fetch_sub(1, Release) != 1 {
+            return
+        }

         // This fence is needed to prevent reordering of use of the data and
         // deletion of the data. Because it is marked `Release`, the decreasing
@@ -578,7 +588,7 @@ impl<T: ?Sized> Drop for Arc<T> {
         atomic::fence(Acquire);

         unsafe {
-            self.drop_slow()
+            self.drop_slow();
         }
     }
 }
@@ -613,11 +623,15 @@ impl<T: ?Sized> Weak<T> {
             // "stale" read of 0 is fine), and any other value is
             // confirmed via the CAS below.
             let n = inner.strong.load(Relaxed);
-            if n == 0 { return None }
+            if n == 0 {
+                return None
+            }

             // Relaxed is valid for the same reason it is on Arc's Clone impl
             let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
-            if old == n { return Some(Arc { _ptr: self._ptr }) }
+            if old == n {
+                return Some(Arc { _ptr: self._ptr })
+            }
         }
     }

@@ -653,7 +667,9 @@ impl<T: ?Sized> Clone for Weak<T> {

         // See comments in Arc::clone() for why we do this (for mem::forget).
         if old_size > MAX_REFCOUNT {
-            unsafe { abort(); }
+            unsafe {
+                abort();
+            }
         }

         return Weak { _ptr: self._ptr }
@@ -705,9 +721,11 @@ impl<T: ?Sized> Drop for Weak<T> {
         // ref, which can only happen after the lock is released.
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            unsafe { deallocate(ptr as *mut u8,
-                                size_of_val(&*ptr),
-                                align_of_val(&*ptr)) }
+            unsafe {
+                deallocate(ptr as *mut u8,
+                           size_of_val(&*ptr),
+                           align_of_val(&*ptr))
+            }
         }
     }
 }
@@ -727,7 +745,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
     ///
     /// five == Arc::new(5);
     /// ```
-    fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
+    fn eq(&self, other: &Arc<T>) -> bool {
+        *(*self) == *(*other)
+    }

     /// Inequality for two `Arc<T>`s.
     ///
@@ -742,7 +762,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
     ///
     /// five != Arc::new(5);
     /// ```
-    fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
+    fn ne(&self, other: &Arc<T>) -> bool {
+        *(*self) != *(*other)
+    }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
@@ -776,7 +798,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
     ///
     /// five < Arc::new(5);
     /// ```
-    fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
+    fn lt(&self, other: &Arc<T>) -> bool {
+        *(*self) < *(*other)
+    }

     /// 'Less-than or equal to' comparison for two `Arc<T>`s.
     ///
@@ -791,7 +815,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
     ///
     /// five <= Arc::new(5);
     /// ```
-    fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
+    fn le(&self, other: &Arc<T>) -> bool {
+        *(*self) <= *(*other)
+    }

     /// Greater-than comparison for two `Arc<T>`s.
     ///
@@ -806,7 +832,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
     ///
     /// five > Arc::new(5);
     /// ```
-    fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
+    fn gt(&self, other: &Arc<T>) -> bool {
+        *(*self) > *(*other)
+    }

     /// 'Greater-than or equal to' comparison for two `Arc<T>`s.
     ///
@@ -821,11 +849,15 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
     ///
     /// five >= Arc::new(5);
     /// ```
-    fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
+    fn ge(&self, other: &Arc<T>) -> bool {
+        *(*self) >= *(*other)
+    }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + Ord> Ord for Arc<T> {
-    fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
+    fn cmp(&self, other: &Arc<T>) -> Ordering {
+        (**self).cmp(&**other)
+    }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + Eq> Eq for Arc<T> {}
@@ -854,7 +886,9 @@ impl<T> fmt::Pointer for Arc<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Default> Default for Arc<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
-    fn default() -> Arc<T> { Arc::new(Default::default()) }
+    fn default() -> Arc<T> {
+        Arc::new(Default::default())
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
@@ -1015,7 +1049,7 @@ mod tests {
     #[test]
     fn weak_self_cyclic() {
         struct Cycle {
-            x: Mutex<Option<Weak<Cycle>>>
+            x: Mutex<Option<Weak<Cycle>>>,
         }

         let a = Arc::new(Cycle { x: Mutex::new(None) });
@@ -1095,7 +1129,9 @@ mod tests {

     // Make sure deriving works with Arc<i32>
     #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
-    struct Foo { inner: Arc<i32> }
+    struct Foo {
+        inner: Arc<i32>,
+    }

     #[test]
     fn test_unsized() {
@@ -1108,5 +1144,7 @@ mod tests {
 }

 impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
-    fn borrow(&self) -> &T { &**self }
+    fn borrow(&self) -> &T {
+        &**self
+    }
 }
diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs
index 3239677fc0c..1529187da06 100644
--- a/src/liballoc/boxed.rs
+++ b/src/liballoc/boxed.rs
@@ -66,7 +66,7 @@ use core::mem;
 use core::ops::{CoerceUnsized, Deref, DerefMut};
 use core::ops::{Placer, Boxed, Place, InPlace, BoxPlace};
 use core::ptr::{self, Unique};
-use core::raw::{TraitObject};
+use core::raw::TraitObject;

 /// A value that represents the heap. This is the default place that the `box`
 /// keyword allocates into when no place is supplied.
@@ -95,7 +95,9 @@ pub const HEAP: ExchangeHeapSingleton =
            reason = "may be renamed; uncertain about custom allocator design",
            issue = "27779")]
 #[derive(Copy, Clone)]
-pub struct ExchangeHeapSingleton { _force_singleton: () }
+pub struct ExchangeHeapSingleton {
+    _force_singleton: (),
+}

 /// A pointer type for heap allocation.
 ///
@@ -126,7 +128,7 @@ pub struct Box<T: ?Sized>(Unique<T>);
 #[unstable(feature = "placement_in",
            reason = "placement box design is still being worked out.",
            issue = "27779")]
-pub struct IntermediateBox<T: ?Sized>{
+pub struct IntermediateBox<T: ?Sized> {
     ptr: *mut u8,
     size: usize,
     align: usize,
@@ -152,9 +154,7 @@ fn make_place<T>() -> IntermediateBox<T> {
     let p = if size == 0 {
         heap::EMPTY as *mut u8
     } else {
-        let p = unsafe {
-            heap::allocate(size, align)
-        };
+        let p = unsafe { heap::allocate(size, align) };
         if p.is_null() {
             panic!("Box make_place allocation failure.");
         }
@@ -165,18 +165,24 @@ fn make_place<T>() -> IntermediateBox<T> {
 }

 impl<T> BoxPlace<T> for IntermediateBox<T> {
-    fn make_place() -> IntermediateBox<T> { make_place() }
+    fn make_place() -> IntermediateBox<T> {
+        make_place()
+    }
 }

 impl<T> InPlace<T> for IntermediateBox<T> {
     type Owner = Box<T>;
-    unsafe fn finalize(self) -> Box<T> { finalize(self) }
+    unsafe fn finalize(self) -> Box<T> {
+        finalize(self)
+    }
 }

 impl<T> Boxed for Box<T> {
     type Data = T;
     type Place = IntermediateBox<T>;
-    unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> { finalize(b) }
+    unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> {
+        finalize(b)
+    }
 }

 impl<T> Placer<T> for ExchangeHeapSingleton {
@@ -190,9 +196,7 @@ impl<T> Placer<T> for ExchangeHeapSingleton {
 impl<T: ?Sized> Drop for IntermediateBox<T> {
     fn drop(&mut self) {
         if self.size > 0 {
-            unsafe {
-                heap::deallocate(self.ptr, self.size, self.align)
-            }
+            unsafe { heap::deallocate(self.ptr, self.size, self.align) }
         }
     }
 }
@@ -256,13 +260,17 @@ impl<T: ?Sized> Box<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: Default> Default for Box<T> {
     #[stable(feature = "rust1", since = "1.0.0")]
-    fn default() -> Box<T> { box Default::default() }
+    fn default() -> Box<T> {
+        box Default::default()
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Default for Box<[T]> {
     #[stable(feature = "rust1", since = "1.0.0")]
-    fn default() -> Box<[T]> { Box::<[T; 0]>::new([]) }
+    fn default() -> Box<[T]> {
+        Box::<[T; 0]>::new([])
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
@@ -275,8 +283,11 @@ impl<T: Clone> Clone for Box<T> {
     /// let x = Box::new(5);
     /// let y = x.clone();
     /// ```
+    #[rustfmt_skip]
     #[inline]
-    fn clone(&self) -> Box<T> { box {(**self).clone()} }
+    fn clone(&self) -> Box<T> {
+        box { (**self).clone() }
+    }
     /// Copies `source`'s contents into `self` without creating a new allocation.
     ///
     /// # Examples
@@ -311,9 +322,13 @@ impl<T: Clone> Clone for Box<T> {
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
     #[inline]
-    fn eq(&self, other: &Box<T>) -> bool { PartialEq::eq(&**self, &**other) }
+    fn eq(&self, other: &Box<T>) -> bool {
+        PartialEq::eq(&**self, &**other)
+    }
     #[inline]
-    fn ne(&self, other: &Box<T>) -> bool { PartialEq::ne(&**self, &**other) }
+    fn ne(&self, other: &Box<T>) -> bool {
+        PartialEq::ne(&**self, &**other)
+    }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
@@ -322,13 +337,21 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
         PartialOrd::partial_cmp(&**self, &**other)
     }
     #[inline]
-    fn lt(&self, other: &Box<T>) -> bool { PartialOrd::lt(&**self, &**other) }
+    fn lt(&self, other: &Box<T>) -> bool {
+        PartialOrd::lt(&**self, &**other)
+    }
     #[inline]
-    fn le(&self, other: &Box<T>) -> bool { PartialOrd::le(&**self, &**other) }
+    fn le(&self, other: &Box<T>) -> bool {
+        PartialOrd::le(&**self, &**other)
+    }
     #[inline]
-    fn ge(&self, other: &Box<T>) -> bool { PartialOrd::ge(&**self, &**other) }
+    fn ge(&self, other: &Box<T>) -> bool {
+        PartialOrd::ge(&**self, &**other)
+    }
     #[inline]
-    fn gt(&self, other: &Box<T>) -> bool { PartialOrd::gt(&**self, &**other) }
+    fn gt(&self, other: &Box<T>) -> bool {
+        PartialOrd::gt(&**self, &**other)
+    }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized + Ord> Ord for Box<T> {
@@ -356,8 +379,7 @@ impl Box<Any> {
         unsafe {
             // Get the raw representation of the trait object
             let raw = Box::into_raw(self);
-            let to: TraitObject =
-                mem::transmute::<*mut Any, TraitObject>(raw);
+            let to: TraitObject = mem::transmute::<*mut Any, TraitObject>(raw);

             // Extract the data pointer
             Ok(Box::from_raw(to.data as *mut T))
@@ -408,23 +430,33 @@ impl<T: ?Sized> fmt::Pointer for Box<T> {
 impl<T: ?Sized> Deref for Box<T> {
     type Target = T;

-    fn deref(&self) -> &T { &**self }
+    fn deref(&self) -> &T {
+        &**self
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> DerefMut for Box<T> {
-    fn deref_mut(&mut self) -> &mut T { &mut **self }
+    fn deref_mut(&mut self) -> &mut T {
+        &mut **self
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
 impl<I: Iterator + ?Sized> Iterator for Box<I> {
     type Item = I::Item;
-    fn next(&mut self) -> Option<I::Item> { (**self).next() }
-    fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
+    fn next(&mut self) -> Option<I::Item> {
+        (**self).next()
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (**self).size_hint()
+    }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
-    fn next_back(&mut self) -> Option<I::Item> { (**self).next_back() }
+    fn next_back(&mut self) -> Option<I::Item> {
+        (**self).next_back()
+    }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {}
@@ -506,10 +538,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
 #[stable(feature = "box_slice_clone", since = "1.3.0")]
 impl<T: Clone> Clone for Box<[T]> {
     fn clone(&self) -> Self {
-        let mut new = BoxBuilder {
-            data: RawVec::with_capacity(self.len()),
-            len: 0
-        };
+        let mut new = BoxBuilder { data: RawVec::with_capacity(self.len()), len: 0 };

         let mut target = new.data.ptr();

@@ -555,9 +584,13 @@ impl<T: Clone> Clone for Box<[T]> {
 }

 impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
-    fn borrow(&self) -> &T { &**self }
+    fn borrow(&self) -> &T {
+        &**self
+    }
 }

 impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
-    fn borrow_mut(&mut self) -> &mut T { &mut **self }
+    fn borrow_mut(&mut self) -> &mut T {
+        &mut **self
+    }
 }
diff --git a/src/liballoc/boxed_test.rs b/src/liballoc/boxed_test.rs
index 2ef23b26a56..7f3dadcf24d 100644
--- a/src/liballoc/boxed_test.rs
+++ b/src/liballoc/boxed_test.rs
@@ -34,12 +34,16 @@ fn any_move() {
     let b = Box::new(Test) as Box<Any>;

     match a.downcast::<i32>() {
-        Ok(a) => { assert!(a == Box::new(8)); }
-        Err(..) => panic!()
+        Ok(a) => {
+            assert!(a == Box::new(8));
+        }
+        Err(..) => panic!(),
     }
     match b.downcast::<Test>() {
-        Ok(a) => { assert!(a == Box::new(Test)); }
-        Err(..) => panic!()
+        Ok(a) => {
+            assert!(a == Box::new(Test));
+        }
+        Err(..) => panic!(),
     }

     let a = Box::new(8) as Box<Any>;
@@ -70,7 +74,8 @@ fn test_show() {

 #[test]
 fn deref() {
-    fn homura<T: Deref<Target = i32>>(_: T) { }
+    fn homura<T: Deref<Target = i32>>(_: T) {
+    }
     homura(Box::new(765));
 }

diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 10cb84d1da1..6961702cbc0 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -22,18 +22,24 @@ extern {
     #[allocator]
     fn __rust_allocate(size: usize, align: usize) -> *mut u8;
     fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
-    fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize,
-                         align: usize) -> *mut u8;
-    fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
-                                 align: usize) -> usize;
+    fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
+    fn __rust_reallocate_inplace(ptr: *mut u8,
+                                 old_size: usize,
+                                 size: usize,
+                                 align: usize)
+                                 -> usize;
     fn __rust_usable_size(size: usize, align: usize) -> usize;
 }

 #[inline(always)]
 fn check_size_and_alignment(size: usize, align: usize) {
     debug_assert!(size != 0);
-    debug_assert!(size <= isize::MAX as usize, "Tried to allocate too much: {} bytes", size);
-    debug_assert!(usize::is_power_of_two(align), "Invalid alignment of allocation: {}", align);
+    debug_assert!(size <= isize::MAX as usize,
+                  "Tried to allocate too much: {} bytes",
+                  size);
+    debug_assert!(usize::is_power_of_two(align),
+                  "Invalid alignment of allocation: {}",
+                  align);
 }

 // FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`
@@ -84,8 +90,11 @@ pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usiz
 /// create the allocation referenced by `ptr`. The `old_size` parameter may be
 /// any value in range_inclusive(requested_size, usable_size).
 #[inline]
-pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
-                                 align: usize) -> usize {
+pub unsafe fn reallocate_inplace(ptr: *mut u8,
+                                 old_size: usize,
+                                 size: usize,
+                                 align: usize)
+                                 -> usize {
     check_size_and_alignment(size, align);
     __rust_reallocate_inplace(ptr, old_size, size, align)
 }
@@ -124,7 +133,9 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
         EMPTY as *mut u8
     } else {
         let ptr = allocate(size, align);
-        if ptr.is_null() { ::oom() }
+        if ptr.is_null() {
+            ::oom()
+        }
         ptr
     }
 }
@@ -148,7 +159,9 @@ mod tests {
         unsafe {
             let size = 4000;
             let ptr = heap::allocate(size, 8);
-            if ptr.is_null() { ::oom() }
+            if ptr.is_null() {
+                ::oom()
+            }
             let ret = heap::reallocate_inplace(ptr, size, size, 8);
             heap::deallocate(ptr, size, 8);
             assert_eq!(ret, heap::usable_size(size, 8));
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 66de5d7bea8..98c729aaba4 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -107,8 +107,12 @@ extern crate alloc_system;

 // Allow testing this library

-#[cfg(test)] #[macro_use] extern crate std;
-#[cfg(test)] #[macro_use] extern crate log;
+#[cfg(test)]
+#[macro_use]
+extern crate std;
+#[cfg(test)]
+#[macro_use]
+extern crate log;

 // Heaps provided for low-level allocation strategies

@@ -123,7 +127,9 @@ pub mod heap;
 #[cfg(not(test))]
 pub mod boxed;
 #[cfg(test)]
-mod boxed { pub use std::boxed::{Box, HEAP}; }
+mod boxed {
+    pub use std::boxed::{Box, HEAP};
+}
 #[cfg(test)]
 mod boxed_test;
 pub mod arc;
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 97acd0db524..dd2db6fab08 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -58,7 +58,11 @@ impl<T> RawVec<T> {
     pub fn new() -> Self {
         unsafe {
             // !0 is usize::MAX. This branch should be stripped at compile time.
-            let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
+            let cap = if mem::size_of::<T>() == 0 {
+                !0
+            } else {
+                0
+            };

             // heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
             RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
@@ -92,7 +96,9 @@ impl<T> RawVec<T> {
             } else {
                 let align = mem::align_of::<T>();
                 let ptr = heap::allocate(alloc_size, align);
-                if ptr.is_null() { oom() }
+                if ptr.is_null() {
+                    oom()
+                }
                 ptr
             };

@@ -133,7 +139,11 @@ impl<T> RawVec<T> {
     ///
     /// This will always be `usize::MAX` if `T` is zero-sized.
     pub fn cap(&self) -> usize {
-        if mem::size_of::<T>() == 0 { !0 } else { self.cap }
+        if mem::size_of::<T>() == 0 {
+            !0
+        } else {
+            self.cap
+        }
     }

     /// Doubles the size of the type's backing allocation. This is common enough
@@ -190,7 +200,11 @@ impl<T> RawVec<T> {

             let (new_cap, ptr) = if self.cap == 0 {
                 // skip to 4 because tiny Vec's are dumb; but not if that would cause overflow
-                let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
+                let new_cap = if elem_size > (!0) / 8 {
+                    1
+                } else {
+                    4
+                };
                 let ptr = heap::allocate(new_cap * elem_size, align);
                 (new_cap, ptr)
             } else {
@@ -207,7 +221,9 @@ impl<T> RawVec<T> {
             };

             // If allocate or reallocate fail, we'll get `null` back
-            if ptr.is_null() { oom() }
+            if ptr.is_null() {
+                oom()
+            }

             self.ptr = Unique::new(ptr as *mut _);
             self.cap = new_cap;
@@ -246,7 +262,9 @@ impl<T> RawVec<T> {

         // Don't actually need any more capacity.
         // Wrapping in case they gave a bad `used_cap`.
-        if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
+        if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+            return;
+        }

         // Nothing we can really do about these checks :(
         let new_cap = used_cap.checked_add(needed_extra_cap).expect("capacity overflow");
@@ -263,7 +281,9 @@ impl<T> RawVec<T> {
         };

         // If allocate or reallocate fail, we'll get `null` back
-        if ptr.is_null() { oom() }
+        if ptr.is_null() {
+            oom()
+        }

         self.ptr = Unique::new(ptr as *mut _);
         self.cap = new_cap;
@@ -326,7 +346,9 @@ impl<T> RawVec<T> {

         // Don't actually need any more capacity.
         // Wrapping in case they give a bas `used_cap`
-        if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
+        if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
+            return;
+        }

         // Nothing we can really do about these checks :(
         let new_cap = used_cap.checked_add(needed_extra_cap)
@@ -346,7 +368,9 @@ impl<T> RawVec<T> {
         };

         // If allocate or reallocate fail, we'll get `null` back
-        if ptr.is_null() { oom() }
+        if ptr.is_null() {
+            oom()
+        }

         self.ptr = Unique::new(ptr as *mut _);
         self.cap = new_cap;
@@ -374,7 +398,8 @@ impl<T> RawVec<T> {
         }

         // This check is my waterloo; it's the only thing Vec wouldn't have to do.
-        assert!(self.cap >= amount, "Tried to shrink to a larger capacity");
+        assert!(self.cap >= amount,
+                "Tried to shrink to a larger capacity");

         if amount == 0 {
             mem::replace(self, RawVec::new());
@@ -386,7 +411,9 @@ impl<T> RawVec<T> {
                                        self.cap * elem_size,
                                        amount * elem_size,
                                        align);
-            if ptr.is_null() { oom() }
+            if ptr.is_null() {
+                oom()
+            }
             self.ptr = Unique::new(ptr as *mut _);
         }
         self.cap = amount;
@@ -446,6 +473,7 @@ impl<T> Drop for RawVec<T> {
 #[inline]
 fn alloc_guard(alloc_size: usize) {
     if core::usize::BITS < 64 {
-        assert!(alloc_size <= ::core::isize::MAX as usize, "capacity overflow");
+        assert!(alloc_size <= ::core::isize::MAX as usize,
+                "capacity overflow");
     }
 }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 4fe474cef0a..3507f123a6f 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -213,7 +213,7 @@ impl<T> Rc<T> {
             _ptr: NonZero::new(Box::into_raw(box RcBox {
                 strong: Cell::new(1),
                 weak: Cell::new(1),
-                value: value
+                value: value,
             })),
         }
     }
@@ -290,13 +290,17 @@ impl<T> Rc<T> {
     #[inline]
     #[unstable(feature = "rc_counts", reason = "not clearly useful",
                issue = "28356")]
-    pub fn weak_count(this: &Self) -> usize { this.weak() - 1 }
+    pub fn weak_count(this: &Self) -> usize {
+        this.weak() - 1
+    }

     /// Get the number of strong references to this value.
     #[inline]
     #[unstable(feature = "rc_counts", reason = "not clearly useful",
                issue = "28356")]
-    pub fn strong_count(this: &Self) -> usize { this.strong() }
+    pub fn strong_count(this: &Self) -> usize {
+        this.strong()
+    }

     /// Returns true if there are no other `Rc` or `Weak` values that share
     /// the same inner value.
@@ -451,7 +455,7 @@ impl<T: ?Sized> Drop for Rc<T> {
         unsafe {
             let ptr = *self._ptr;
             if !(*(&ptr as *const _ as *const *const ())).is_null() &&
-                ptr as *const () as usize != mem::POST_DROP_USIZE {
+               ptr as *const () as usize != mem::POST_DROP_USIZE {
                 self.dec_strong();
                 if self.strong() == 0 {
                     // destroy the contained object
@@ -530,7 +534,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
     /// five == Rc::new(5);
     /// ```
     #[inline(always)]
-    fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
+    fn eq(&self, other: &Rc<T>) -> bool {
+        **self == **other
+    }

     /// Inequality for two `Rc<T>`s.
     ///
@@ -546,7 +552,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
     ///
     /// five != Rc::new(5);
     /// ```
     #[inline(always)]
-    fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
+    fn ne(&self, other: &Rc<T>) -> bool {
+        **self != **other
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
@@ -586,7 +594,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
     /// five < Rc::new(5);
     /// ```
     #[inline(always)]
-    fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
+    fn lt(&self, other: &Rc<T>) -> bool {
+        **self < **other
+    }

     /// 'Less-than or equal to' comparison for two `Rc<T>`s.
     ///
@@ -602,7 +612,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
     /// five <= Rc::new(5);
     /// ```
     #[inline(always)]
-    fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
+    fn le(&self, other: &Rc<T>) -> bool {
+        **self <= **other
+    }

     /// Greater-than comparison for two `Rc<T>`s.
     ///
@@ -618,7 +630,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
     /// five > Rc::new(5);
     /// ```
     #[inline(always)]
-    fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
+    fn gt(&self, other: &Rc<T>) -> bool {
+        **self > **other
+    }

     /// 'Greater-than or equal to' comparison for two `Rc<T>`s.
     ///
@@ -634,7 +648,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
     /// five >= Rc::new(5);
     /// ```
     #[inline(always)]
-    fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
+    fn ge(&self, other: &Rc<T>) -> bool {
+        **self >= **other
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
@@ -653,7 +669,9 @@ impl<T: ?Sized + Ord> Ord for Rc<T> {
     /// five.partial_cmp(&Rc::new(5));
     /// ```
     #[inline]
-    fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
+    fn cmp(&self, other: &Rc<T>) -> Ordering {
+        (**self).cmp(&**other)
+    }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
@@ -764,12 +782,13 @@ impl<T: ?Sized> Drop for Weak<T> {
         unsafe {
             let ptr = *self._ptr;
             if !(*(&ptr as *const _ as *const *const ())).is_null() &&
-                ptr as *const () as usize != mem::POST_DROP_USIZE {
+               ptr as *const () as usize != mem::POST_DROP_USIZE {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to zero if all
                 // the strong pointers have disappeared.
                 if self.weak() == 0 {
-                    deallocate(ptr as *mut u8, size_of_val(&*ptr),
+                    deallocate(ptr as *mut u8,
+                               size_of_val(&*ptr),
                                align_of_val(&*ptr))
                 }
             }
@@ -821,7 +840,9 @@ trait RcBoxPtr<T: ?Sized> {
     fn inner(&self) -> &RcBox<T>;

     #[inline]
-    fn strong(&self) -> usize { self.inner().strong.get() }
+    fn strong(&self) -> usize {
+        self.inner().strong.get()
+    }

     #[inline]
     fn inc_strong(&self) {
@@ -829,10 +850,14 @@ trait RcBoxPtr<T: ?Sized> {
     }

     #[inline]
-    fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
+    fn dec_strong(&self) {
+        self.inner().strong.set(self.strong() - 1);
+    }

     #[inline]
-    fn weak(&self) -> usize { self.inner().weak.get() }
+    fn weak(&self) -> usize {
+        self.inner().weak.get()
+    }

     #[inline]
     fn inc_weak(&self) {
@@ -840,7 +865,9 @@ trait RcBoxPtr<T: ?Sized> {
     }

     #[inline]
-    fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
+    fn dec_weak(&self) {
+        self.inner().weak.set(self.weak() - 1);
+    }
 }

 impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
@@ -928,7 +955,7 @@ mod tests {
     #[test]
     fn weak_self_cyclic() {
         struct Cycle {
-            x: RefCell<Option<Weak<Cycle>>>
+            x: RefCell<Option<Weak<Cycle>>>,
         }

         let a = Rc::new(Cycle { x: RefCell::new(None) });
@@ -1086,5 +1113,7 @@ mod tests {
 }

 impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
-    fn borrow(&self) -> &T { &**self }
+    fn borrow(&self) -> &T {
+        &**self
+    }
 }