From 24ce2597823640726aa302a12a4de787a1fe1f05 Mon Sep 17 00:00:00 2001
From: Sean McArthur
Date: Thu, 28 Jun 2018 11:49:47 -0700
Subject: [PATCH] Arc: remove unused allocation from Weak::new()

---
 src/liballoc/arc.rs | 56 +++++++++++++++++++++++++++++----------------
 1 file changed, 36 insertions(+), 20 deletions(-)

diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 0fbd1408f64..2abd9c85c57 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -23,7 +23,7 @@ use core::borrow;
 use core::fmt;
 use core::cmp::Ordering;
 use core::intrinsics::abort;
-use core::mem::{self, align_of_val, size_of_val, uninitialized};
+use core::mem::{self, align_of_val, size_of_val};
 use core::ops::Deref;
 use core::ops::CoerceUnsized;
 use core::ptr::{self, NonNull};
@@ -43,6 +43,9 @@ use vec::Vec;
 /// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
 const MAX_REFCOUNT: usize = (isize::MAX) as usize;
 
+/// A sentinel value that is used for the pointer of `Weak::new()`.
+const WEAK_EMPTY: usize = 1;
+
 /// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
 /// Reference Counted'.
 ///
@@ -235,6 +238,10 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 /// [`None`]: ../../std/option/enum.Option.html#variant.None
 #[stable(feature = "arc_weak", since = "1.4.0")]
 pub struct Weak<T: ?Sized> {
+    // This is a `NonNull` to allow optimizing the size of this type in enums,
+    // but it is actually not truly "non-null". A `Weak::new()` will set this
+    // to a sentinel value, instead of needing to allocate some space in the
+    // heap.
     ptr: NonNull<ArcInner<T>>,
 }
 
@@ -1011,8 +1018,8 @@ impl<T: ?Sized> Arc<T> {
 }
 
 impl<T> Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating any memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1029,11 +1036,7 @@ impl<T> Weak<T> {
     pub fn new() -> Weak<T> {
         unsafe {
             Weak {
-                ptr: Box::into_raw_non_null(box ArcInner {
-                    strong: atomic::AtomicUsize::new(0),
-                    weak: atomic::AtomicUsize::new(1),
-                    data: uninitialized(),
-                }),
+                ptr: NonNull::new_unchecked(WEAK_EMPTY as *mut _),
             }
         }
     }
@@ -1070,7 +1073,11 @@
     pub fn upgrade(&self) -> Option<Arc<T>> {
         // We use a CAS loop to increment the strong count instead of a
         // fetch_add because once the count hits 0 it must never be above 0.
-        let inner = self.inner();
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return None;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
 
         // Relaxed load because any write of 0 that we can observe
         // leaves the field in a permanently zero state (so a
@@ -1092,17 +1099,15 @@
 
             // Relaxed is valid for the same reason it is on Arc's Clone impl
             match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
-                Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
+                Ok(_) => return Some(Arc {
+                    // null checked above
+                    ptr: self.ptr,
+                    phantom: PhantomData,
+                }),
                 Err(old) => n = old,
             }
         }
     }
-
-    #[inline]
-    fn inner(&self) -> &ArcInner<T> {
-        // See comments above for why this is "safe"
-        unsafe { self.ptr.as_ref() }
-    }
 }
 
 #[stable(feature = "arc_weak", since = "1.4.0")]
@@ -1120,11 +1125,16 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return Weak { ptr: self.ptr };
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
         // See comments in Arc::clone() for why this is relaxed. This can use a
        // fetch_add (ignoring the lock) because the weak count is only locked
        // when there are *no other* weak pointers in existence. (So we can't be
        // running this code in that case).
-        let old_size = self.inner().weak.fetch_add(1, Relaxed);
+        let old_size = inner.weak.fetch_add(1, Relaxed);
 
         // See comments in Arc::clone() for why we do this (for mem::forget).
         if old_size > MAX_REFCOUNT {
@@ -1139,8 +1149,8 @@
 
 #[stable(feature = "downgraded_weak", since = "1.10.0")]
 impl<T> Default for Weak<T> {
-    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
-    /// it. Calling [`upgrade`] on the return value always gives [`None`].
+    /// Constructs a new `Weak<T>`, without allocating memory.
+    /// Calling [`upgrade`] on the return value always gives [`None`].
     ///
     /// [`upgrade`]: struct.Weak.html#method.upgrade
     /// [`None`]: ../../std/option/enum.Option.html#variant.None
@@ -1193,7 +1203,13 @@ impl<T: ?Sized> Drop for Weak<T> {
         // weak count can only be locked if there was precisely one weak ref,
         // meaning that drop could only subsequently run ON that remaining weak
         // ref, which can only happen after the lock is released.
-        if self.inner().weak.fetch_sub(1, Release) == 1 {
+        let inner = if self.ptr.as_ptr() as *const u8 as usize == WEAK_EMPTY {
+            return;
+        } else {
+            unsafe { self.ptr.as_ref() }
+        };
+
+        if inner.weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
             unsafe {
                 Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
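
The patch rests on two facts: `NonNull` only promises a non-zero pointer, so storing a non-zero sentinel such as `1` keeps the enum-niche optimization intact, and every path that would dereference `ptr` can compare it against the sentinel first. A minimal standalone sketch of that pattern, with hypothetical names (`Handle`, `EMPTY`) that are not part of the patch:

```
use std::mem::size_of;
use std::ptr::NonNull;

// Hypothetical sentinel, playing the role of WEAK_EMPTY in the patch.
const EMPTY: usize = 1;

struct Handle {
    // `NonNull` keeps the niche, so `Option<Handle>` stays pointer-sized,
    // but the pointer is either a real allocation or the sentinel.
    ptr: NonNull<u64>,
}

impl Handle {
    fn empty() -> Handle {
        // The sentinel is non-zero, so `new_unchecked`'s non-null
        // invariant holds even though nothing was allocated.
        Handle { ptr: unsafe { NonNull::new_unchecked(EMPTY as *mut u64) } }
    }

    fn get(&self) -> Option<&u64> {
        // Every dereference checks for the sentinel first, exactly as
        // the patched upgrade/clone/drop do.
        if self.ptr.as_ptr() as usize == EMPTY {
            None
        } else {
            Some(unsafe { self.ptr.as_ref() })
        }
    }
}

fn main() {
    // The niche optimization survives the sentinel trick.
    assert_eq!(size_of::<Option<Handle>>(), size_of::<Handle>());
    assert!(Handle::empty().get().is_none());
}
```

The sentinel can never collide with a live `ArcInner<T>`: the inner struct starts with two `AtomicUsize` counters, so any real allocation is aligned to at least `align_of::<usize>()` and cannot sit at address 1.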
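Behaviorally the change is invisible to users: an empty `Weak` still upgrades to `None`, and cloning or dropping it is now a no-op instead of refcount traffic on a dummy allocation. A quick check of the observable behavior, using only the stable `std::sync` API (nothing from the patch internals is referenced):

```
use std::sync::{Arc, Weak};

fn main() {
    // After this patch, Weak::new() performs no allocation;
    // upgrade() bails out on the sentinel before touching a refcount.
    let empty: Weak<String> = Weak::new();
    assert!(empty.upgrade().is_none());

    // clone() and drop() also short-circuit on the sentinel:
    // there is no weak count to update and nothing to deallocate.
    let also_empty = empty.clone();
    drop(also_empty);
    drop(empty);

    // A Weak obtained from a live Arc is unaffected.
    let strong = Arc::new(42);
    let weak = Arc::downgrade(&strong);
    assert_eq!(*weak.upgrade().unwrap(), 42);
    drop(strong);
    assert!(weak.upgrade().is_none());
}
```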