use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
use core::pin::Pin;
use core::ptr::{self, NonNull};
-use core::slice::{self, from_raw_parts_mut};
+use core::slice::from_raw_parts_mut;
use core::sync::atomic;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
use crate::rc::is_dangling;
use crate::string::String;
/// counting in general.
///
/// [rc_examples]: ../../std/rc/index.html#examples
-#[cfg_attr(all(bootstrap, not(test)), lang = "arc")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Arc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
weak: atomic::AtomicUsize::new(1),
data,
};
- Self::from_inner(Box::into_raw_non_null(x))
+ Self::from_inner(Box::leak(x).into())
}
/// Constructs a new `Arc` with uninitialized contents.
/// # Examples
///
/// ```
- /// #![feature(weak_into_raw)]
- ///
/// use std::sync::Arc;
///
/// let x = Arc::new("hello".to_owned());
/// assert_eq!(x_ptr, Arc::as_ptr(&y));
/// assert_eq!(unsafe { &*x_ptr }, "hello");
/// ```
- #[unstable(feature = "weak_into_raw", issue = "60728")]
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn as_ptr(this: &Self) -> *const T {
let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
let fake_ptr = ptr as *mut T;
///
/// ```
/// #![feature(rc_into_raw_non_null)]
+ /// #![allow(deprecated)]
///
/// use std::sync::Arc;
///
/// assert_eq!(deref, "hello");
/// ```
#[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
+ #[rustc_deprecated(since = "1.44.0", reason = "use `Arc::into_raw` instead")]
#[inline]
pub fn into_raw_non_null(this: Self) -> NonNull<T> {
// safe because Arc guarantees its pointer is non-null
this.inner().strong.load(SeqCst)
}
+ /// Increments the strong reference count on the `Arc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Arc::into_raw`, and the
+ /// associated `Arc` instance must be valid (i.e. the strong count must be at
+ /// least 1) for the duration of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(arc_mutate_strong_count)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Arc::into_raw(five);
+ /// Arc::incr_strong_count(ptr);
+ ///
+ /// // This assertion is deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// let five = Arc::from_raw(ptr);
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
+ pub unsafe fn incr_strong_count(ptr: *const T) {
+ // SAFETY(caller): per the contract above, `ptr` was produced by
+ // `Arc::into_raw` and the allocation is still live, so reconstructing
+ // an `Arc` from it here is sound.
+ // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
+ let arc = mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr));
+ // Now increase refcount, but don't drop new refcount either
+ let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
+ }
+
+ /// Decrements the strong reference count on the `Arc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Arc::into_raw`, and the
+ /// associated `Arc` instance must be valid (i.e. the strong count must be at
+ /// least 1) when invoking this method. This method can be used to release the final
+ /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been
+ /// released.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(arc_mutate_strong_count)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Arc::into_raw(five);
+ /// Arc::incr_strong_count(ptr);
+ ///
+ /// // Those assertions are deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// let five = Arc::from_raw(ptr);
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// Arc::decr_strong_count(ptr);
+ /// assert_eq!(1, Arc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
+ pub unsafe fn decr_strong_count(ptr: *const T) {
+ // SAFETY(caller): per the contract above, `ptr` came from `Arc::into_raw`
+ // and the strong count is at least 1, so dropping this reconstructed `Arc`
+ // releases exactly one strong reference (freeing the value if it was the last).
+ mem::drop(Arc::from_raw(ptr));
+ }
+
#[inline]
fn inner(&self) -> &ArcInner<T> {
// This unsafety is ok because while this arc is alive we're guaranteed
unsafe fn drop_slow(&mut self) {
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
- ptr::drop_in_place(&mut self.ptr.as_mut().data);
+ ptr::drop_in_place(Self::get_mut_unchecked(self));
- if self.inner().weak.fetch_sub(1, Release) == 1 {
- acquire!(self.inner().weak);
- Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
- }
+ // Drop the weak ref collectively held by all strong references
+ drop(Weak { ptr: self.ptr });
}
#[inline]
}
impl<T> Arc<[T]> {
- /// Copy elements from slice into newly allocated Arc<[T]>
+ /// Copy elements from slice into newly allocated Arc<\[T\]>
///
/// Unsafe because the caller must either take ownership or bind `T: Copy`.
unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
// We abort because such a program is incredibly degenerate, and we
// don't care to support it.
if old_size > MAX_REFCOUNT {
+ // remove `unsafe` on bootstrap bump
+ #[cfg_attr(not(bootstrap), allow(unused_unsafe))]
unsafe {
abort();
}
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
- unsafe { &mut this.ptr.as_mut().data }
+ unsafe { Self::get_mut_unchecked(this) }
}
}
#[inline]
#[unstable(feature = "get_mut_unchecked", issue = "63292")]
pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
- &mut this.ptr.as_mut().data
+ // We are careful to *not* create a reference covering the "count" fields, as
+ // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
+ // NOTE(review): that is why `NonNull::as_ptr` (a raw pointer) is used here rather
+ // than `as_mut` — no `&mut ArcInner` covering the counts is ever materialized.
+ &mut (*this.ptr.as_ptr()).data
}
/// Determine whether this is the unique reference (including weak refs) to
/// # Examples
///
/// ```
- /// #![feature(weak_into_raw)]
- ///
/// use std::sync::Arc;
/// use std::ptr;
///
/// ```
///
/// [`null`]: ../../std/ptr/fn.null.html
- #[unstable(feature = "weak_into_raw", issue = "60728")]
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn as_ptr(&self) -> *const T {
let offset = data_offset_sized::<T>();
let ptr = self.ptr.cast::<u8>().as_ptr().wrapping_offset(offset);
/// # Examples
///
/// ```
- /// #![feature(weak_into_raw)]
- ///
/// use std::sync::{Arc, Weak};
///
/// let strong = Arc::new("hello".to_owned());
///
/// [`from_raw`]: struct.Weak.html#method.from_raw
/// [`as_ptr`]: struct.Weak.html#method.as_ptr
- #[unstable(feature = "weak_into_raw", issue = "60728")]
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
pub fn into_raw(self) -> *const T {
let result = self.as_ptr();
mem::forget(self);
/// # Examples
///
/// ```
- /// #![feature(weak_into_raw)]
- ///
/// use std::sync::{Arc, Weak};
///
/// let strong = Arc::new("hello".to_owned());
/// [`Weak`]: struct.Weak.html
/// [`Arc`]: struct.Arc.html
/// [`forget`]: ../../std/mem/fn.forget.html
- #[unstable(feature = "weak_into_raw", issue = "60728")]
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
if ptr.is_null() {
Self::new()
}
}
+/// Helper type to allow accessing the reference counts without
+/// making any assertions about the data field.
+struct WeakInner<'a> {
+ /// The weak reference count, borrowed out of the shared `ArcInner`.
+ weak: &'a atomic::AtomicUsize,
+ /// The strong reference count, borrowed out of the shared `ArcInner`.
+ strong: &'a atomic::AtomicUsize,
+}
+
impl<T: ?Sized> Weak<T> {
/// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
/// dropping of the inner value if successful.
// See comments in `Arc::clone` for why we do this (for `mem::forget`).
if n > MAX_REFCOUNT {
+ // remove `unsafe` on bootstrap bump
+ #[cfg_attr(not(bootstrap), allow(unused_unsafe))]
unsafe {
abort();
}
/// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
/// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
- fn inner(&self) -> Option<&ArcInner<T>> {
- if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
+ fn inner(&self) -> Option<WeakInner<'_>> {
+ // A dangling pointer means this `Weak` came from `Weak::new` and owns
+ // no allocation, so there are no counts to borrow.
+ if is_dangling(self.ptr) {
+ None
+ } else {
+ // We are careful to *not* create a reference covering the "data" field, as
+ // the field may be mutated concurrently (for example, if the last `Arc`
+ // is dropped, the data field will be dropped in-place).
+ Some(unsafe {
+ let ptr = self.ptr.as_ptr();
+ WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
+ })
+ }
}
/// Returns `true` if the two `Weak`s point to the same allocation (similar to
// See comments in Arc::clone() for why we do this (for mem::forget).
if old_size > MAX_REFCOUNT {
+ #[cfg_attr(not(bootstrap), allow(unused_unsafe))] // remove `unsafe` on bootstrap bump
unsafe {
abort();
}
///
/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
#[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized + Eq> ArcEqIdent<T> for Arc<T> {
+impl<T: ?Sized + crate::rc::MarkerEq> ArcEqIdent<T> for Arc<T> {
+ // NOTE(review): `MarkerEq` appears to be a specialization-safe stand-in for
+ // `Eq` (see `crate::rc`) — it guarantees reflexive equality, which is what
+ // makes the pointer-identity shortcut below sound. Confirm against `rc.rs`.
#[inline]
fn eq(&self, other: &Arc<T>) -> bool {
Arc::ptr_eq(self, other) || **self == **other
}
}
+#[stable(feature = "shared_from_cow", since = "1.45.0")]
+impl<'a, B> From<Cow<'a, B>> for Arc<B>
+where
+ B: ToOwned + ?Sized,
+ Arc<B>: From<&'a B> + From<B::Owned>,
+{
+ /// Converts a `Cow` into an `Arc` by delegating to the matching `From`
+ /// impl: `From<&'a B>` for the borrowed case, `From<B::Owned>` for the
+ /// owned one.
+ #[inline]
+ fn from(cow: Cow<'a, B>) -> Arc<B> {
+ match cow {
+ Cow::Borrowed(s) => Arc::from(s),
+ Cow::Owned(s) => Arc::from(s),
+ }
+ }
+}
+
#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]>
where
/// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
/// ```
fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
- ArcFromIter::from_iter(iter.into_iter())
+ ToArcSlice::to_arc_slice(iter.into_iter())
}
}
/// Specialization trait used for collecting into `Arc<[T]>`.
-trait ArcFromIter<T, I> {
- fn from_iter(iter: I) -> Self;
+trait ToArcSlice<T>: Iterator<Item = T> + Sized {
+ /// Collects this iterator into a newly allocated `Arc<[T]>`.
+ fn to_arc_slice(self) -> Arc<[T]>;
}
-impl<T, I: Iterator<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
- default fn from_iter(iter: I) -> Self {
- iter.collect::<Vec<T>>().into()
+impl<T, I: Iterator<Item = T>> ToArcSlice<T> for I {
+ // Generic fallback: buffer through a `Vec` first. The `TrustedLen`
+ // specialization avoids this intermediate allocation when the exact
+ // length is known up front.
+ default fn to_arc_slice(self) -> Arc<[T]> {
+ self.collect::<Vec<T>>().into()
}
}
-impl<T, I: iter::TrustedLen<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
- default fn from_iter(iter: I) -> Self {
+impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
+ fn to_arc_slice(self) -> Arc<[T]> {
// This is the case for a `TrustedLen` iterator.
- let (low, high) = iter.size_hint();
+ // `TrustedLen` promises `size_hint` is exact, so when an upper bound
+ // exists `low` is the true element count.
+ let (low, high) = self.size_hint();
if let Some(high) = high {
debug_assert_eq!(
low,
unsafe {
// SAFETY: We need to ensure that the iterator has an exact length and we have.
- Arc::from_iter_exact(iter, low)
+ Arc::from_iter_exact(self, low)
}
} else {
// Fall back to normal implementation.
- iter.collect::<Vec<T>>().into()
+ self.collect::<Vec<T>>().into()
}
}
}
-impl<'a, T: 'a + Clone> ArcFromIter<&'a T, slice::Iter<'a, T>> for Arc<[T]> {
- fn from_iter(iter: slice::Iter<'a, T>) -> Self {
- // Delegate to `impl<T: Clone> From<&[T]> for Arc<[T]>`.
- //
- // In the case that `T: Copy`, we get to use `ptr::copy_nonoverlapping`
- // which is even more performant.
- //
- // In the fall-back case we have `T: Clone`. This is still better
- // than the `TrustedLen` implementation as slices have a known length
- // and so we get to avoid calling `size_hint` and avoid the branching.
- iter.as_slice().into()
- }
-}
-
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
fn borrow(&self) -> &T {