/// use std::thread;
///
/// fn main() {
/// let numbers: Vec<_> = (0..100).collect();
/// let shared_numbers = Arc::new(numbers);
///
/// for _ in 0..10 {
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
    ptr: Shared<ArcInner<T>>,
}
#[stable(feature = "rust1", since = "1.0.0")]
#[unsafe_no_drop_flag]
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
    ptr: Shared<ArcInner<T>>,
}
#[stable(feature = "arc_weak", since = "1.4.0")]
weak: atomic::AtomicUsize::new(1),
data: data,
};
        Arc { ptr: unsafe { Shared::new(Box::into_raw(x)) } }
}
    /// Unwraps the contained value if the `Arc<T>` has exactly one strong reference.
///
/// Otherwise, an `Err` is returned with the same `Arc<T>`.
///
    /// This will succeed even if there are outstanding weak references.
    ///
/// # Examples
///
/// ```
#[stable(feature = "arc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
// See `drop` for why all these atomics are like this
        if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
return Err(this);
}
atomic::fence(Acquire);
unsafe {
            let ptr = *this.ptr;
let elem = ptr::read(&(*ptr).data);
// Make a weak pointer to clean up the implicit strong-weak reference
            let _weak = Weak { ptr: this.ptr };
mem::forget(this);
Ok(elem)
/// ```
#[stable(feature = "arc_weak", since = "1.4.0")]
pub fn downgrade(this: &Self) -> Weak<T> {
        // This Relaxed is OK because we're checking the value in the CAS
        // below.
        let mut cur = this.inner().weak.load(Relaxed);
        loop {
// check if the weak counter is currently "locked"; if so, spin.
if cur == usize::MAX {
                cur = this.inner().weak.load(Relaxed);
continue;
}
// Unlike with Clone(), we need this to be an Acquire read to
// synchronize with the write coming from `is_unique`, so that the
// events prior to that write happen before this read.
            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
                Ok(_) => return Weak { ptr: this.ptr },
                Err(old) => cur = old,
}
}
}
// `ArcInner` structure itself is `Sync` because the inner data is
// `Sync` as well, so we're ok loaning out an immutable pointer to these
// contents.
        unsafe { &**self.ptr }
}
// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
        let ptr = *self.ptr;
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
}
}
        Arc { ptr: self.ptr }
}
}
// before release writes (i.e., decrements) to `strong`. Since we hold a
// weak count, there's no chance the ArcInner itself could be
// deallocated.
        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
// Another strong pointer exists; clone
*this = Arc::new((**this).clone());
} else if this.inner().weak.load(Relaxed) != 1 {
// Materialize our own implicit weak pointer, so that it can clean
// up the ArcInner as needed.
            let weak = Weak { ptr: this.ptr };
// mark the data itself as already deallocated
unsafe {
// here (due to zeroing) because data is no longer accessed by
// other threads (due to there being no more strong refs at this
// point).
                let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
mem::swap(this, &mut swap);
mem::forget(swap);
}
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
unsafe {
            let inner = &mut **this.ptr;
&mut inner.data
}
}
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
unsafe {
            let inner = &mut **this.ptr;
Some(&mut inner.data)
}
} else {
// The acquire label here ensures a happens-before relationship with any
// writes to `strong` prior to decrements of the `weak` count (via drop,
// which uses Release).
        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
// Due to the previous acquire read, this will observe any writes to
// `strong` that were due to upgrading weak pointers; only strong
// clones remain, which require that the strong count is > 1 anyway.
// This structure has #[unsafe_no_drop_flag], so this drop glue may run
// more than once (but it is guaranteed to be zeroed after the first if
// it's run more than once)
        let thin = *self.ptr as *const ();
if thin as usize == mem::POST_DROP_USIZE {
return;
// We use a CAS loop to increment the strong count instead of a
// fetch_add because once the count hits 0 it must never be above 0.
let inner = self.inner();

        // Relaxed load because any write of 0 that we can observe
        // leaves the field in a permanently zero state (so a
        // "stale" read of 0 is fine), and any other value is
        // confirmed via the CAS below.
        let mut n = inner.strong.load(Relaxed);

        loop {
if n == 0 {
return None;
}
}
// Relaxed is valid for the same reason it is on Arc's Clone impl
            match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
                Ok(_) => return Some(Arc { ptr: self.ptr }),
                Err(old) => n = old,
}
}
}
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
        unsafe { &**self.ptr }
}
}
}
}
        return Weak { ptr: self.ptr };
}
}
/// } // implicit drop
/// ```
fn drop(&mut self) {
        let ptr = *self.ptr;
let thin = ptr as *const ();
// see comments above for why this check is here
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Arc<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&*self.ptr, f)
}
}
issue = "30425")]
pub fn new() -> Weak<T> {
unsafe {
            Weak { ptr: Shared::new(Box::into_raw(box ArcInner {
strong: atomic::AtomicUsize::new(0),
weak: atomic::AtomicUsize::new(1),
data: uninitialized(),
#[test]
fn test_strong_count() {
    let a = Arc::new(0);
assert!(Arc::strong_count(&a) == 1);
let w = Arc::downgrade(&a);
assert!(Arc::strong_count(&a) == 1);
#[test]
fn test_weak_count() {
    let a = Arc::new(0);
assert!(Arc::strong_count(&a) == 1);
assert!(Arc::weak_count(&a) == 0);
let w = Arc::downgrade(&a);
#[test]
fn show_arc() {
    let a = Arc::new(5);
assert_eq!(format!("{:?}", a), "5");
}