use core::ptr::{self, NonNull};
use core::slice::from_raw_parts_mut;
-use crate::alloc::{box_free, handle_alloc_error, AllocErr, AllocRef, Global, Layout};
+use crate::alloc::{box_free, handle_alloc_error, AllocError, AllocRef, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::string::String;
use crate::vec::Vec;
// Allows `Rc<T>` to be used as a method receiver in dynamic dispatch when
// `T` unsizes to `U` (e.g. `Rc<Concrete>` -> `Rc<dyn Trait>`).
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
impl<T: ?Sized> Rc<T> {
+ #[inline(always)]
+ fn inner(&self) -> &RcBox<T> {
+ // This unsafety is ok because while this Rc is alive we're guaranteed
+ // that the inner pointer is valid.
+ unsafe { self.ptr.as_ref() }
+ }
+
// Wraps an already-allocated `RcBox` pointer in an `Rc` without adjusting
// any reference counts; callers must ensure the counts already account for
// this new handle.
fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
Self { ptr, phantom: PhantomData }
}
// NOTE(review): the stray `)` below looks like extraction garbage rather
// than real source — confirm against the upstream file.
)
}
+ /// Constructs a new `Rc<T>` using a weak reference to itself. Attempting
+ /// to upgrade the weak reference before this function returns will result
+ /// in a `None` value. However, the weak reference may be cloned freely and
+ /// stored for use at a later time.
+ #[unstable(feature = "arc_new_cyclic", issue = "75861")]
+ pub fn new_cyclic(data_fn: impl FnOnce(&Weak<T>) -> T) -> Rc<T> {
+ // Construct the inner in the "uninitialized" state with a single
+ // weak reference.
+ let uninit_ptr: NonNull<_> = Box::leak(box RcBox {
+ strong: Cell::new(0),
+ weak: Cell::new(1),
+ value: mem::MaybeUninit::<T>::uninit(),
+ })
+ .into();
+
+ // Reinterpret `RcBox<MaybeUninit<T>>` as `RcBox<T>`; the value is only
+ // touched after `ptr::write` below initializes it.
+ let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();
+
+ // Because `strong` is still 0, upgrading this weak reference inside
+ // `data_fn` yields `None`, keeping the uninitialized value unreachable.
+ let weak = Weak { ptr: init_ptr };
+
+ // It's important we don't give up ownership of the weak pointer, or
+ // else the memory might be freed by the time `data_fn` returns. If
+ // we really wanted to pass ownership, we could create an additional
+ // weak pointer for ourselves, but this would result in additional
+ // updates to the weak reference count which might not be necessary
+ // otherwise.
+ let data = data_fn(&weak);
+
+ unsafe {
+ let inner = init_ptr.as_ptr();
+ ptr::write(&raw mut (*inner).value, data);
+
+ // Flip the strong count from 0 to 1 only after the value is
+ // written, making the now-initialized value upgradable.
+ let prev_value = (*inner).strong.get();
+ debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
+ (*inner).strong.set(1);
+ }
+
+ let strong = Rc::from_inner(init_ptr);
+
+ // Strong references should collectively own a shared weak reference,
+ // so don't run the destructor for our old weak reference.
+ mem::forget(weak);
+ strong
+ }
+
/// Constructs a new `Rc` with uninitialized contents.
///
/// # Examples
/// assert_eq!(*zero, 0)
/// ```
///
- /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+ /// [zeroed]: mem::MaybeUninit::zeroed
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
unsafe {
// the strong count, and then remove the implicit "strong weak"
// pointer while also handling drop logic by just crafting a
// fake Weak.
- this.dec_strong();
+ this.inner().dec_strong();
let _weak = Weak { ptr: this.ptr };
forget(this);
Ok(val)
/// assert_eq!(*values, [0, 0, 0])
/// ```
///
- /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+ /// [zeroed]: mem::MaybeUninit::zeroed
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
unsafe {
/// Calling this when the content is not yet fully initialized
/// causes immediate undefined behavior.
///
- /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
///
/// # Examples
///
/// Calling this when the content is not yet fully initialized
/// causes immediate undefined behavior.
///
- /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
///
/// # Examples
///
/// ```
#[stable(feature = "rc_weak", since = "1.4.0")]
pub fn downgrade(this: &Self) -> Weak<T> {
- this.inc_weak();
+ this.inner().inc_weak();
// Make sure we do not create a dangling Weak
debug_assert!(!is_dangling(this.ptr));
Weak { ptr: this.ptr }
#[inline]
#[stable(feature = "rc_counts", since = "1.15.0")]
pub fn weak_count(this: &Self) -> usize {
- this.weak() - 1
+ this.inner().weak() - 1
}
/// Gets the number of strong (`Rc`) pointers to this allocation.
#[inline]
#[stable(feature = "rc_counts", since = "1.15.0")]
pub fn strong_count(this: &Self) -> usize {
- this.strong()
+ this.inner().strong()
}
/// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
#[inline]
#[unstable(feature = "get_mut_unchecked", issue = "63292")]
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
- unsafe { &mut this.ptr.as_mut().value }
+ // We are careful to *not* create a reference covering the "count" fields, as
+ // this would conflict with accesses to the reference counts (e.g. by `Weak`).
+ unsafe { &mut (*this.ptr.as_ptr()).value }
}
#[inline]
unsafe {
let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
mem::swap(this, &mut swap);
- swap.dec_strong();
+ swap.inner().dec_strong();
// Remove implicit strong-weak ref (no need to craft a fake
// Weak here -- we know other Weaks can clean up for us)
- swap.dec_weak();
+ swap.inner().dec_weak();
forget(swap);
}
}
/// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
unsafe fn allocate_for_layout(
value_layout: Layout,
- allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocErr>,
+ allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
) -> *mut RcBox<T> {
// Calculate layout using the given value layout.
/// ```
fn drop(&mut self) {
unsafe {
- self.dec_strong();
- if self.strong() == 0 {
+ self.inner().dec_strong();
+ if self.inner().strong() == 0 {
// destroy the contained object
- ptr::drop_in_place(self.ptr.as_mut());
+ ptr::drop_in_place(Self::get_mut_unchecked(self));
// remove the implicit "strong weak" pointer now that we've
// destroyed the contents.
- self.dec_weak();
+ self.inner().dec_weak();
- if self.weak() == 0 {
+ if self.inner().weak() == 0 {
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
/// ```
#[inline]
fn clone(&self) -> Rc<T> {
// Cloning only bumps the strong count and copies the pointer; the
// pointee itself is never duplicated.
- self.inc_strong();
+ self.inner().inc_strong();
Self::from_inner(self.ptr)
}
}
address == usize::MAX
}
+/// Helper type to allow accessing the reference counts without
+/// making any assertions about the data field.
+struct WeakInner<'a> {
+ // Borrowed straight from the count cells of the `RcBox` allocation;
+ // these references deliberately do not cover the (possibly already
+ // dropped) value field.
+ weak: &'a Cell<usize>,
+ strong: &'a Cell<usize>,
+}
+
impl<T: ?Sized> Weak<T> {
/// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
/// dropping of the inner value if successful.
.unwrap_or(0)
}
- /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
+ /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`,
/// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
- fn inner(&self) -> Option<&RcBox<T>> {
- if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
+ fn inner(&self) -> Option<WeakInner<'_>> {
+ // `Weak::new` stores a dangling sentinel pointer with no backing
+ // allocation, so there are no counts to hand out in that case.
+ if is_dangling(self.ptr) {
+ None
+ } else {
+ // We are careful to *not* create a reference covering the "data" field, as
+ // the field may be mutated concurrently (for example, if the last `Rc`
+ // is dropped, the data field will be dropped in-place).
+ Some(unsafe {
+ let ptr = self.ptr.as_ptr();
+ WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
+ })
+ }
}
/// Returns `true` if the two `Weak`s point to the same allocation (similar to
/// assert!(other_weak_foo.upgrade().is_none());
/// ```
fn drop(&mut self) {
- if let Some(inner) = self.inner() {
- inner.dec_weak();
- // the weak count starts at 1, and will only go to zero if all
- // the strong pointers have disappeared.
- if inner.weak() == 0 {
- unsafe {
- Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
- }
+ // A dangling `Weak` (from `Weak::new`) owns no allocation; nothing to do.
+ let inner = if let Some(inner) = self.inner() { inner } else { return };
+
+ inner.dec_weak();
+ // the weak count starts at 1, and will only go to zero if all
+ // the strong pointers have disappeared.
+ if inner.weak() == 0 {
+ unsafe {
+ // SAFETY: the weak count reached zero, so no other handle
+ // references this allocation and it can be freed.
+ Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
}
}
}
// clone these much in Rust thanks to ownership and move-semantics.
#[doc(hidden)]
-trait RcBoxPtr<T: ?Sized> {
- fn inner(&self) -> &RcBox<T>;
+// Abstracts over anything that can expose the two reference-count cells,
+// so the count-manipulation helpers below work for both `RcBox` and
+// `WeakInner` without ever forming a reference to the data field.
+trait RcInnerPtr {
+ fn weak_ref(&self) -> &Cell<usize>;
+ fn strong_ref(&self) -> &Cell<usize>;
#[inline]
fn strong(&self) -> usize {
- self.inner().strong.get()
+ self.strong_ref().get()
}
#[inline]
// NOTE(review): aborting here guards against the count wrapping past
// usize::MAX (presumably possible via repeated `mem::forget`) — confirm
// the rationale against the elided comment above this guard.
if strong == 0 || strong == usize::MAX {
abort();
}
- self.inner().strong.set(strong + 1);
+ self.strong_ref().set(strong + 1);
}
#[inline]
fn dec_strong(&self) {
- self.inner().strong.set(self.strong() - 1);
+ self.strong_ref().set(self.strong() - 1);
}
#[inline]
fn weak(&self) -> usize {
- self.inner().weak.get()
+ self.weak_ref().get()
}
#[inline]
// Same overflow/underflow guard as the strong count above.
if weak == 0 || weak == usize::MAX {
abort();
}
- self.inner().weak.set(weak + 1);
+ self.weak_ref().set(weak + 1);
}
#[inline]
fn dec_weak(&self) {
- self.inner().weak.set(self.weak() - 1);
+ self.weak_ref().set(self.weak() - 1);
}
}
-impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
+// `RcBox` stores the counts inline, so it simply hands out its own fields.
+impl<T: ?Sized> RcInnerPtr for RcBox<T> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> {
- unsafe { self.ptr.as_ref() }
+ fn weak_ref(&self) -> &Cell<usize> {
+ &self.weak
+ }
+
+ #[inline(always)]
+ fn strong_ref(&self) -> &Cell<usize> {
+ &self.strong
}
}
-impl<T: ?Sized> RcBoxPtr<T> for RcBox<T> {
+// `WeakInner` already borrows the count cells, so the impl just forwards
+// the stored references.
+impl<'a> RcInnerPtr for WeakInner<'a> {
#[inline(always)]
- fn inner(&self) -> &RcBox<T> {
- self
+ fn weak_ref(&self) -> &Cell<usize> {
+ self.weak
+ }
+
+ #[inline(always)]
+ fn strong_ref(&self) -> &Cell<usize> {
+ self.strong
}
}