#![stable(feature = "rust1", since = "1.0.0")]

//! Thread-safe reference-counting pointers.
//!
//! See the [`Arc<T>`][arc] documentation for more details.
//!
//! [arc]: struct.Arc.html
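//!
//! As a quick orientation, a minimal sketch of sharing a value across
//! threads with [`Arc<T>`][arc]:
//!
//! ```no_run
//! use std::sync::Arc;
//! use std::thread;
//!
//! let data = Arc::new(vec![1, 2, 3]);
//!
//! let handle = {
//!     // Each clone bumps the reference count; the allocation is freed
//!     // when the last `Arc` is dropped.
//!     let data = Arc::clone(&data);
//!     thread::spawn(move || println!("{:?}", data))
//! };
//!
//! handle.join().unwrap();
//! ```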

use core::any::Any;
use core::array::LengthAtMost32;
use core::borrow;
use core::cmp::Ordering;
use core::convert::{From, TryFrom};
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::iter;
use core::marker::{PhantomData, Unpin, Unsize};
use core::mem::{self, align_of, align_of_val, size_of_val};
use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
use core::pin::Pin;
use core::ptr::{self, NonNull};
use core::slice::{self, from_raw_parts_mut};
use core::sync::atomic;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};

use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
use crate::boxed::Box;
use crate::rc::is_dangling;
use crate::string::String;
use crate::vec::Vec;

#[cfg(test)]
mod tests;

/// A soft limit on the amount of references that may be made to an `Arc`.
///
/// Going above this limit will abort your program, although not
/// necessarily at _exactly_ `MAX_REFCOUNT + 1` references.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;

#[cfg(not(sanitize = "thread"))]
macro_rules! acquire {
    ($x:expr) => {
        atomic::fence(Acquire)
    };
}

// ThreadSanitizer does not support memory fences. To avoid false positive
// reports in the Arc / Weak implementation, use atomic loads for
// synchronization instead.
#[cfg(sanitize = "thread")]
macro_rules! acquire {
    ($x:expr) => {
        $x.load(Acquire)
    };
}

/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
/// Reference Counted'.
///
/// The type `Arc<T>` provides shared ownership of a value of type `T`,
/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
/// a new `Arc` instance, which points to the same allocation on the heap as the
/// source `Arc`, while increasing a reference count. When the last `Arc`
/// pointer to a given allocation is destroyed, the value stored in that allocation (often
/// referred to as "inner value") is also dropped.
///
/// Shared references in Rust disallow mutation by default, and `Arc` is no
/// exception: you cannot generally obtain a mutable reference to something
/// inside an `Arc`. If you need to mutate through an `Arc`, use
/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
/// types.
///
/// ## Thread Safety
///
/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
/// counting. This means that it is thread-safe. The disadvantage is that
/// atomic operations are more expensive than ordinary memory accesses. If you
/// are not sharing reference-counted allocations between threads, consider using
/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
/// However, a library might choose `Arc<T>` in order to give library consumers
/// more flexibility.
///
/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as `T` implements
/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
/// data, but it doesn't add thread safety to its data. Consider
/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
/// non-atomic operations.
///
/// In the end, this means that you may need to pair `Arc<T>` with some sort of
/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
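///
/// For example, one might share a counter between threads by pairing `Arc`
/// with a [`Mutex<T>`][mutex] (a minimal sketch):
///
/// ```no_run
/// use std::sync::{Arc, Mutex};
/// use std::thread;
///
/// let counter = Arc::new(Mutex::new(0));
///
/// let handles: Vec<_> = (0..4)
///     .map(|_| {
///         let counter = Arc::clone(&counter);
///         thread::spawn(move || {
///             // Lock the mutex to mutate the shared value.
///             *counter.lock().unwrap() += 1;
///         })
///     })
///     .collect();
///
/// for handle in handles {
///     handle.join().unwrap();
/// }
/// assert_eq!(*counter.lock().unwrap(), 4);
/// ```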
///
/// ## Breaking cycles with `Weak`
///
/// The [`downgrade`][downgrade] method can be used to create a non-owning
/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
/// to an `Arc`, but this will return [`None`] if the value stored in the allocation has
/// already been dropped. In other words, `Weak` pointers do not keep the value
/// inside the allocation alive; however, they *do* keep the allocation
/// (the backing store for the value) alive.
///
/// A cycle between `Arc` pointers will never be deallocated. For this reason,
/// [`Weak`][weak] is used to break cycles. For example, a tree could have
/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
/// pointers from children back to their parents.
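///
/// For instance, a node might hold a `Weak` back-pointer to its parent
/// without keeping the parent alive (a minimal sketch; the `Node` type here
/// is purely illustrative):
///
/// ```
/// use std::sync::{Arc, Weak};
///
/// struct Node {
///     parent: Weak<Node>,
/// }
///
/// let parent = Arc::new(Node { parent: Weak::new() });
/// let child = Arc::new(Node { parent: Arc::downgrade(&parent) });
///
/// // The child's back-pointer does not keep the parent's value alive
/// // once the last strong pointer is dropped.
/// drop(parent);
/// assert!(child.parent.upgrade().is_none());
/// ```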
///
/// # Cloning references
///
/// Creating a new reference from an existing reference counted pointer is done using the
/// `Clone` trait implemented for [`Arc<T>`][arc] and [`Weak<T>`][weak].
///
/// ```
/// use std::sync::Arc;
/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
/// // The two syntaxes below are equivalent.
/// let a = foo.clone();
/// let b = Arc::clone(&foo);
/// // a, b, and foo are all Arcs that point to the same memory location
/// ```
///
/// ## `Deref` behavior
///
/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
/// clashes with `T`'s methods, the methods of `Arc<T>` itself are associated
/// functions, called using function-like syntax:
///
/// ```
/// use std::sync::Arc;
/// let my_arc = Arc::new(());
///
/// Arc::downgrade(&my_arc);
/// ```
///
/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the inner value may have
/// already been dropped.
///
/// [arc]: struct.Arc.html
/// [weak]: struct.Weak.html
/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
/// [mutex]: ../../std/sync/struct.Mutex.html
/// [rwlock]: ../../std/sync/struct.RwLock.html
/// [atomic]: ../../std/sync/atomic/index.html
/// [`Send`]: ../../std/marker/trait.Send.html
/// [`Sync`]: ../../std/marker/trait.Sync.html
/// [deref]: ../../std/ops/trait.Deref.html
/// [downgrade]: struct.Arc.html#method.downgrade
/// [upgrade]: struct.Weak.html#method.upgrade
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [`RefCell<T>`]: ../../std/cell/struct.RefCell.html
/// [`std::sync`]: ../../std/sync/index.html
/// [`Arc::clone(&from)`]: #method.clone
///
/// # Examples
///
/// Sharing some immutable data between threads:
///
// Note that we **do not** run these tests here. The windows builders get super
// unhappy if a thread outlives the main thread and then exits at the same time
// (something deadlocks) so we just avoid this entirely by not running these
// tests.
/// ```no_run
/// use std::sync::Arc;
/// use std::thread;
///
/// let five = Arc::new(5);
///
/// for _ in 0..10 {
///     let five = Arc::clone(&five);
///
///     thread::spawn(move || {
///         println!("{:?}", five);
///     });
/// }
/// ```
///
/// Sharing a mutable [`AtomicUsize`]:
///
/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html
///
/// ```no_run
/// use std::sync::Arc;
/// use std::sync::atomic::{AtomicUsize, Ordering};
/// use std::thread;
///
/// let val = Arc::new(AtomicUsize::new(5));
///
/// for _ in 0..10 {
///     let val = Arc::clone(&val);
///
///     thread::spawn(move || {
///         let v = val.fetch_add(1, Ordering::SeqCst);
///         println!("{:?}", v);
///     });
/// }
/// ```
///
/// See the [`rc` documentation][rc_examples] for more examples of reference
/// counting in general.
///
/// [rc_examples]: ../../std/rc/index.html#examples
#[cfg_attr(all(bootstrap, not(test)), lang = "arc")]
#[cfg_attr(not(test), rustc_diagnostic_item = "Arc")]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
    ptr: NonNull<ArcInner<T>>,
    phantom: PhantomData<ArcInner<T>>,
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}

impl<T: ?Sized> Arc<T> {
    fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
        Self { ptr, phantom: PhantomData }
    }

    unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
        Self::from_inner(NonNull::new_unchecked(ptr))
    }
}

/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present. Thus it may return [`None`]
/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
/// itself (the backing store) from being deallocated.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
/// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to
/// prevent circular references between [`Arc`] pointers, since mutual owning references
/// would never allow either [`Arc`] to be dropped. For example, a tree could
/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
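///
/// For example, a `Weak` only yields a value while some strong pointer is
/// still alive:
///
/// ```
/// use std::sync::Arc;
///
/// let strong = Arc::new(5);
/// let weak = Arc::downgrade(&strong);
/// assert!(weak.upgrade().is_some());
///
/// drop(strong);
/// // The inner value has been dropped, so `upgrade` now fails.
/// assert!(weak.upgrade().is_none());
/// ```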
///
/// [`Arc`]: struct.Arc.html
/// [`Arc::downgrade`]: struct.Arc.html#method.downgrade
/// [`upgrade`]: struct.Weak.html#method.upgrade
/// [`Option`]: ../../std/option/enum.Option.html
/// [`None`]: ../../std/option/enum.Option.html#variant.None
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
    // This is a `NonNull` to allow optimizing the size of this type in enums,
    // but it is not necessarily a valid pointer.
    // `Weak::new` sets this to `usize::MAX` so that it doesn't need
    // to allocate space on the heap. That's not a value a real pointer
    // will ever have because `ArcInner` has alignment at least 2.
    ptr: NonNull<ArcInner<T>>,
}

#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}

#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "(Weak)")
    }
}

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct ArcInner<T: ?Sized> {
    strong: atomic::AtomicUsize,

    // the value usize::MAX acts as a sentinel for temporarily "locking" the
    // ability to upgrade weak pointers or downgrade strong ones; this is used
    // to avoid races in `make_mut` and `get_mut`.
    weak: atomic::AtomicUsize,

    data: T,
}

unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}

impl<T> Arc<T> {
    /// Constructs a new `Arc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(data: T) -> Arc<T> {
        // Start the weak pointer count as 1 which is the weak pointer that's
        // held by all the strong pointers (kinda), see std/rc.rs for more info
        let x: Box<_> = box ArcInner {
            strong: atomic::AtomicUsize::new(1),
            weak: atomic::AtomicUsize::new(1),
            data,
        };
        Self::from_inner(Box::into_raw_non_null(x))
    }

    /// Constructs a new `Arc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut five = Arc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit() -> Arc<mem::MaybeUninit<T>> {
        unsafe {
            Arc::from_ptr(Arc::allocate_for_layout(Layout::new::<T>(), |mem| {
                mem as *mut ArcInner<mem::MaybeUninit<T>>
            }))
        }
    }

    /// Constructs a new `Arc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
    /// of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::sync::Arc;
    ///
    /// let zero = Arc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_zeroed() -> Arc<mem::MaybeUninit<T>> {
        unsafe {
            let mut uninit = Self::new_uninit();
            ptr::write_bytes::<T>(Arc::get_mut_unchecked(&mut uninit).as_mut_ptr(), 0, 1);
            uninit
        }
    }

    /// Constructs a new `Pin<Arc<T>>`. If `T` does not implement `Unpin`, then
    /// `data` will be pinned in memory and unable to be moved.
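    ///
    /// A minimal sketch (note that `Pin<Arc<T>>` still dereferences to `T`):
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let pinned = Arc::pin(5);
    /// assert_eq!(*pinned, 5);
    /// ```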
    #[stable(feature = "pin", since = "1.33.0")]
    pub fn pin(data: T) -> Pin<Arc<T>> {
        unsafe { Pin::new_unchecked(Arc::new(data)) }
    }

    /// Returns the inner value, if the `Arc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// [result]: ../../std/result/enum.Result.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new(3);
    /// assert_eq!(Arc::try_unwrap(x), Ok(3));
    ///
    /// let x = Arc::new(4);
    /// let _y = Arc::clone(&x);
    /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        // See `drop` for why all these atomics are like this
        if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
            return Err(this);
        }

        acquire!(this.inner().strong);

        unsafe {
            let elem = ptr::read(&this.ptr.as_ref().data);

            // Make a weak pointer to clean up the implicit strong-weak reference
            let _weak = Weak { ptr: this.ptr };
            mem::forget(this);

            Ok(elem)
        }
    }
}

impl<T> Arc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit<T>]> {
        unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) }
    }
}

impl<T> Arc<mem::MaybeUninit<T>> {
    /// Converts to `Arc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut five = Arc::<u32>::new_uninit();
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Arc<T> {
        Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
    }
}

impl<T> Arc<[mem::MaybeUninit<T>]> {
    /// Converts to `Arc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[inline]
    pub unsafe fn assume_init(self) -> Arc<[T]> {
        Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _)
    }
}

impl<T: ?Sized> Arc<T> {
    /// Consumes the `Arc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
    /// [`Arc::from_raw`][from_raw].
    ///
    /// [from_raw]: struct.Arc.html#method.from_raw
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new("hello".to_owned());
    /// let x_ptr = Arc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub fn into_raw(this: Self) -> *const T {
        let ptr = Self::as_ptr(&this);
        mem::forget(this);
        ptr
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is
    /// valid for as long as there are strong counts in the `Arc`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(weak_into_raw)]
    ///
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new("hello".to_owned());
    /// let y = Arc::clone(&x);
    /// let x_ptr = Arc::as_ptr(&x);
    /// assert_eq!(x_ptr, Arc::as_ptr(&y));
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// ```
    #[unstable(feature = "weak_into_raw", issue = "60728")]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
        let fake_ptr = ptr as *mut T;

        // SAFETY: This cannot go through Deref::deref.
        // Instead, we manually offset the pointer rather than manifesting a reference.
        // This is so that the returned pointer retains the same provenance as our pointer.
        // This is required so that e.g. `get_mut` can write through the pointer
        // after the Arc is recovered through `from_raw`.
        unsafe {
            let offset = data_offset(&(*ptr).data);
            set_data_ptr(fake_ptr, (ptr as *mut u8).offset(offset))
        }
    }

    /// Constructs an `Arc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and
    /// alignment as `T`. This is trivially true if `U` is `T`.
    /// Note that if `U` is not `T` but has the same size and alignment, this is
    /// basically like transmuting references of different types. See
    /// [`mem::transmute`][transmute] for more information on what
    /// restrictions apply in this case.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Arc<T>` is never accessed.
    ///
    /// [into_raw]: struct.Arc.html#method.into_raw
    /// [transmute]: ../../std/mem/fn.transmute.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new("hello".to_owned());
    /// let x_ptr = Arc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Arc` to prevent leak.
    ///     let x = Arc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        let offset = data_offset(ptr);

        // Reverse the offset to find the original ArcInner.
        let fake_ptr = ptr as *mut ArcInner<T>;
        let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));

        Self::from_ptr(arc_ptr)
    }

    /// Consumes the `Arc`, returning the wrapped pointer as `NonNull<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(rc_into_raw_non_null)]
    ///
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new("hello".to_owned());
    /// let ptr = Arc::into_raw_non_null(x);
    /// let deref = unsafe { ptr.as_ref() };
    /// assert_eq!(deref, "hello");
    /// ```
    #[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
    #[inline]
    pub fn into_raw_non_null(this: Self) -> NonNull<T> {
        // safe because Arc guarantees its pointer is non-null
        unsafe { NonNull::new_unchecked(Arc::into_raw(this) as *mut _) }
    }

    /// Creates a new [`Weak`][weak] pointer to this allocation.
    ///
    /// [weak]: struct.Weak.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let weak_five = Arc::downgrade(&five);
    /// ```
    #[stable(feature = "arc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T> {
        // This Relaxed is OK because we're checking the value in the CAS
        // below.
        let mut cur = this.inner().weak.load(Relaxed);

        loop {
            // check if the weak counter is currently "locked"; if so, spin.
            if cur == usize::MAX {
                cur = this.inner().weak.load(Relaxed);
                continue;
            }

            // NOTE: this code currently ignores the possibility of overflow
            // into usize::MAX; in general both Rc and Arc need to be adjusted
            // to deal with overflow.

            // Unlike with Clone(), we need this to be an Acquire read to
            // synchronize with the write coming from `is_unique`, so that the
            // events prior to that write happen before this read.
            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
                Ok(_) => {
                    // Make sure we do not create a dangling Weak
                    debug_assert!(!is_dangling(this.ptr));
                    return Weak { ptr: this.ptr };
                }
                Err(old) => cur = old,
            }
        }
    }

    /// Gets the number of [`Weak`][weak] pointers to this allocation.
    ///
    /// [weak]: struct.Weak.html
    ///
    /// # Safety
    ///
    /// This method by itself is safe, but using it correctly requires extra care.
    /// Another thread can change the weak count at any time,
    /// including potentially between calling this method and acting on the result.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _weak_five = Arc::downgrade(&five);
    ///
    /// // This assertion is deterministic because we haven't shared
    /// // the `Arc` or `Weak` between threads.
    /// assert_eq!(1, Arc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "arc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        let cnt = this.inner().weak.load(SeqCst);
        // If the weak count is currently locked, the value of the
        // count was 0 just before taking the lock.
        if cnt == usize::MAX { 0 } else { cnt - 1 }
    }

    /// Gets the number of strong (`Arc`) pointers to this allocation.
    ///
    /// # Safety
    ///
    /// This method by itself is safe, but using it correctly requires extra care.
    /// Another thread can change the strong count at any time,
    /// including potentially between calling this method and acting on the result.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _also_five = Arc::clone(&five);
    ///
    /// // This assertion is deterministic because we haven't shared
    /// // the `Arc` between threads.
    /// assert_eq!(2, Arc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "arc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong.load(SeqCst)
    }

    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // This unsafety is ok because while this arc is alive we're guaranteed
        // that the inner pointer is valid. Furthermore, we know that the
        // `ArcInner` structure itself is `Sync` because the inner data is
        // `Sync` as well, so we're ok loaning out an immutable pointer to these
        // contents.
        unsafe { self.ptr.as_ref() }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Destroy the data at this time, even though we may not free the box
        // allocation itself (there may still be weak pointers lying around).
        ptr::drop_in_place(&mut self.ptr.as_mut().data);

        if self.inner().weak.fetch_sub(1, Release) == 1 {
            acquire!(self.inner().weak);
            Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
        }
    }

    #[inline]
    #[stable(feature = "ptr_eq", since = "1.17.0")]
    /// Returns `true` if the two `Arc`s point to the same allocation
    /// (in a vein similar to [`ptr::eq`]).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let same_five = Arc::clone(&five);
    /// let other_five = Arc::new(5);
    ///
    /// assert!(Arc::ptr_eq(&five, &same_five));
    /// assert!(!Arc::ptr_eq(&five, &other_five));
    /// ```
    ///
    /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.ptr.as_ptr() == other.ptr.as_ptr()
    }
}

impl<T: ?Sized> Arc<T> {
    /// Allocates an `ArcInner<T>` with sufficient space for
    /// a possibly-unsized inner value where the value has the layout provided.
    ///
    /// The function `mem_to_arcinner` is called with the data pointer
    /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
    unsafe fn allocate_for_layout(
        value_layout: Layout,
        mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
    ) -> *mut ArcInner<T> {
        // Calculate layout using the given value layout.
        // Previously, layout was calculated on the expression
        // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
        // reference (see #54908).
        let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();

        let mem = Global
            .alloc(layout, AllocInit::Uninitialized)
            .unwrap_or_else(|_| handle_alloc_error(layout));

        // Initialize the ArcInner
        let inner = mem_to_arcinner(mem.ptr.as_ptr());
        debug_assert_eq!(Layout::for_value(&*inner), layout);

        ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
        ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));

        inner
    }

    /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
        // Allocate for the `ArcInner<T>` using the given value.
        Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
            set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
        })
    }

    fn from_box(v: Box<T>) -> Arc<T> {
        unsafe {
            let box_unique = Box::into_unique(v);
            let bptr = box_unique.as_ptr();

            let value_size = size_of_val(&*bptr);
            let ptr = Self::allocate_for_ptr(bptr);

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                bptr as *const T as *const u8,
                &mut (*ptr).data as *mut _ as *mut u8,
                value_size,
            );

            // Free the allocation without dropping its contents
            box_free(box_unique);

            Self::from_ptr(ptr)
        }
    }
}

impl<T> Arc<[T]> {
    /// Allocates an `ArcInner<[T]>` with the given length.
    unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
        Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
            ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
        })
    }
}

/// Sets the data pointer of a `?Sized` raw pointer.
///
/// For a slice/trait object, this sets the `data` field and leaves the rest
/// unchanged. For a sized raw pointer, this simply sets the pointer.
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
    ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
    ptr
}

impl<T> Arc<[T]> {
    /// Copy elements from slice into newly allocated `Arc<[T]>`
    ///
    /// Unsafe because the caller must either take ownership or bind `T: Copy`.
    unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
        let ptr = Self::allocate_for_slice(v.len());

        ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());

        Self::from_ptr(ptr)
    }

    /// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
    ///
    /// Behavior is undefined should the size be wrong.
    unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Arc<[T]> {
        // Panic guard while cloning T elements.
        // In the event of a panic, elements that have been written
        // into the new ArcInner will be dropped, then the memory freed.
        struct Guard<T> {
            mem: NonNull<u8>,
            elems: *mut T,
            layout: Layout,
            n_elems: usize,
        }

        impl<T> Drop for Guard<T> {
            fn drop(&mut self) {
                unsafe {
                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
                    ptr::drop_in_place(slice);

                    Global.dealloc(self.mem.cast(), self.layout);
                }
            }
        }

        let ptr = Self::allocate_for_slice(len);

        let mem = ptr as *mut _ as *mut u8;
        let layout = Layout::for_value(&*ptr);

        // Pointer to first element
        let elems = &mut (*ptr).data as *mut [T] as *mut T;

        let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };

        for (i, item) in iter.enumerate() {
            ptr::write(elems.add(i), item);
            guard.n_elems += 1;
        }

        // All clear. Forget the guard so it doesn't free the new ArcInner.
        mem::forget(guard);

        Self::from_ptr(ptr)
    }
}

/// Specialization trait used for `From<&[T]>`.
trait ArcFromSlice<T> {
    fn from_slice(slice: &[T]) -> Self;
}

impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    default fn from_slice(v: &[T]) -> Self {
        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
    }
}

impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    fn from_slice(v: &[T]) -> Self {
        unsafe { Arc::copy_from_slice(v) }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Arc<T> {
    /// Makes a clone of the `Arc` pointer.
    ///
    /// This creates another pointer to the same allocation, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let _ = Arc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Arc<T> {
        // Using a relaxed ordering is alright here, as knowledge of the
        // original reference prevents other threads from erroneously deleting
        // the object.
        //
        // As explained in the [Boost documentation][1], increasing the
        // reference counter can always be done with memory_order_relaxed: New
        // references to an object can only be formed from an existing
        // reference, and passing an existing reference from one thread to
        // another must already provide any required synchronization.
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        let old_size = self.inner().strong.fetch_add(1, Relaxed);

        // However we need to guard against massive refcounts in case someone
        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
        // and users will use-after-free. We racily saturate to `isize::MAX` on
        // the assumption that there aren't ~2 billion threads incrementing
        // the reference count at once. This branch will never be taken in
        // any realistic program.
        //
        // We abort because such a program is incredibly degenerate, and we
        // don't care to support it.
        if old_size > MAX_REFCOUNT {
            unsafe {
                abort();
            }
        }

        Self::from_inner(self.ptr)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.inner().data
    }
}

#[unstable(feature = "receiver_trait", issue = "none")]
impl<T: ?Sized> Receiver for Arc<T> {}

impl<T: Clone> Arc<T> {
    /// Makes a mutable reference into the given `Arc`.
    ///
    /// If there are other `Arc` or [`Weak`][weak] pointers to the same allocation,
    /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value
    /// to ensure unique ownership. This is also referred to as clone-on-write.
    ///
    /// Note that this differs from the behavior of [`Rc::make_mut`] which disassociates
    /// any remaining `Weak` pointers.
    ///
    /// See also [`get_mut`][get_mut], which will fail rather than cloning.
    ///
    /// [weak]: struct.Weak.html
    /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
    /// [get_mut]: struct.Arc.html#method.get_mut
    /// [`Rc::make_mut`]: ../rc/struct.Rc.html#method.make_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut data = Arc::new(5);
    ///
    /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
    /// let mut other_data = Arc::clone(&data); // Won't clone inner data
    /// *Arc::make_mut(&mut data) += 1; // Clones inner data
    /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
    /// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different allocations.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        // Note that we hold both a strong reference and a weak reference.
        // Thus, releasing our strong reference only will not, by itself, cause
        // the memory to be deallocated.
        //
        // Use Acquire to ensure that we see any writes to `weak` that happen
        // before release writes (i.e., decrements) to `strong`. Since we hold a
        // weak count, there's no chance the ArcInner itself could be
        // deallocated.
        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
            // Another strong pointer exists; clone
            *this = Arc::new((**this).clone());
        } else if this.inner().weak.load(Relaxed) != 1 {
            // Relaxed suffices in the above because this is fundamentally an
            // optimization: we are always racing with weak pointers being
            // dropped. Worst case, we end up allocating a new Arc unnecessarily.

            // We removed the last strong ref, but there are additional weak
            // refs remaining. We'll move the contents to a new Arc, and
            // invalidate the other weak refs.

            // Note that it is not possible for the read of `weak` to yield
            // usize::MAX (i.e., locked), since the weak count can only be
            // locked by a thread with a strong reference.

            // Materialize our own implicit weak pointer, so that it can clean
            // up the ArcInner as needed.
            let weak = Weak { ptr: this.ptr };

            // mark the data itself as already deallocated
            unsafe {
                // there is no data race in the implicit write caused by `read`
                // here (due to zeroing) because data is no longer accessed by
                // other threads (due to there being no more strong refs at this
                // point).
                let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
                mem::swap(this, &mut swap);
                mem::forget(swap);
            }
        } else {
            // We were the sole reference of either kind; bump back up the
            // strong ref count.
            this.inner().strong.store(1, Release);
        }

        // As with `get_mut()`, the unsafety is ok because our reference was
        // either unique to begin with, or became one upon cloning the contents.
        unsafe { &mut this.ptr.as_mut().data }
    }
}

impl<T: ?Sized> Arc<T> {
    /// Returns a mutable reference into the given `Arc`, if there are
    /// no other `Arc` or [`Weak`][weak] pointers to the same allocation.
    ///
    /// Returns [`None`][option] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when there are other pointers.
    ///
    /// [weak]: struct.Weak.html
    /// [option]: ../../std/option/enum.Option.html
    /// [make_mut]: struct.Arc.html#method.make_mut
    /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut x = Arc::new(3);
    /// *Arc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Arc::clone(&x);
    /// assert!(Arc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if this.is_unique() {
            // This unsafety is ok because we're guaranteed that the pointer
            // returned is the *only* pointer that will ever be returned to T. Our
            // reference count is guaranteed to be 1 at this point, and we required
            // the Arc itself to be `mut`, so we're returning the only possible
            // reference to the inner data.
            unsafe { Some(Arc::get_mut_unchecked(this)) }
        } else {
            None
        }
    }

    /// Returns a mutable reference into the given `Arc`,
    /// without any check.
    ///
    /// See also [`get_mut`], which is safe and does appropriate checks.
    ///
    /// [`get_mut`]: struct.Arc.html#method.get_mut
    ///
    /// # Safety
    ///
    /// Any other `Arc` or [`Weak`] pointers to the same allocation must not be dereferenced
    /// for the duration of the returned borrow.
    /// This is trivially the case if no such pointers exist,
    /// for example immediately after `Arc::new`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::sync::Arc;
    ///
    /// let mut x = Arc::new(String::new());
    /// unsafe {
    ///     Arc::get_mut_unchecked(&mut x).push_str("foo")
    /// }
    /// assert_eq!(*x, "foo");
    /// ```
    #[inline]
    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        &mut this.ptr.as_mut().data
    }

    /// Determine whether this is the unique reference (including weak refs) to
    /// the underlying data.
    ///
    /// Note that this requires locking the weak ref count.
    fn is_unique(&mut self) -> bool {
        // lock the weak pointer count if we appear to be the sole weak pointer
        // holder.
        //
        // The acquire label here ensures a happens-before relationship with any
        // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
        // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
        // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
            // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
            // counter in `drop` -- the only access that happens when any but the last reference
            // is being dropped.
            let unique = self.inner().strong.load(Acquire) == 1;

            // The release write here synchronizes with a read in `downgrade`,
            // effectively preventing the above read of `strong` from happening
            // after the write.
            self.inner().weak.store(1, Release); // release the lock
            unique
        } else {
            false
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
    /// Drops the `Arc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::new(Foo);
    /// let foo2 = Arc::clone(&foo);
    ///
    /// drop(foo); // Doesn't print anything
    /// drop(foo2); // Prints "dropped!"
    /// ```
    ///
    /// [`Weak`]: ../../std/sync/struct.Weak.html
    #[inline]
    fn drop(&mut self) {
        // Because `fetch_sub` is already atomic, we do not need to synchronize
        // with other threads unless we are going to delete the object. This
        // same logic applies to the below `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 {
            return;
        }

        // This fence is needed to prevent reordering of use of the data and
        // deletion of the data. Because it is marked `Release`, the decreasing
        // of the reference count synchronizes with this `Acquire` fence. This
        // means that use of the data happens before decreasing the reference
        // count, which happens before this fence, which happens before the
        // deletion of the data.
        //
        // As explained in the [Boost documentation][1],
        //
        // > It is important to enforce any possible access to the object in one
        // > thread (through an existing reference) to *happen before* deleting
        // > the object in a different thread. This is achieved by a "release"
        // > operation after dropping a reference (any access to the object
        // > through this reference must obviously happened before), and an
        // > "acquire" operation before deleting the object.
        //
        // In particular, while the contents of an Arc are usually immutable, it's
        // possible to have interior writes to something like a Mutex<T>. Since a
        // Mutex is not acquired when it is deleted, we can't rely on its
        // synchronization logic to make writes in thread A visible to a destructor
        // running in thread B.
        //
        // Also note that the Acquire fence here could probably be replaced with an
        // Acquire load, which could improve performance in highly-contended
        // situations. See [2].
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        // [2]: (https://github.com/rust-lang/rust/pull/41714)
        acquire!(self.inner().strong);

        unsafe {
            self.drop_slow();
        }
    }
}

impl Arc<dyn Any + Send + Sync> {
    #[inline]
    #[stable(feature = "rc_downcast", since = "1.29.0")]
    /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::any::Any;
    /// use std::sync::Arc;
    ///
    /// fn print_if_string(value: Arc<dyn Any + Send + Sync>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// let my_string = "Hello World".to_string();
    /// print_if_string(Arc::new(my_string));
    /// print_if_string(Arc::new(0i8));
    /// ```
    pub fn downcast<T>(self) -> Result<Arc<T>, Self>
    where
        T: Any + Send + Sync + 'static,
    {
        if (*self).is::<T>() {
            let ptr = self.ptr.cast::<ArcInner<T>>();
            mem::forget(self);
            Ok(Arc::from_inner(ptr))
        } else {
            Err(self)
        }
    }
}

impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: struct.Weak.html#method.upgrade
    /// [`None`]: ../../std/option/enum.Option.html#variant.None
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[stable(feature = "downgraded_weak", since = "1.10.0")]
    pub fn new() -> Weak<T> {
        Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0") }
    }

    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
    ///
    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
    /// unaligned or even [`null`] otherwise.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(weak_into_raw)]
    ///
    /// use std::sync::Arc;
    /// use std::ptr;
    ///
    /// let strong = Arc::new("hello".to_owned());
    /// let weak = Arc::downgrade(&strong);
    /// // Both point to the same object
    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
    /// // The strong here keeps it alive, so we can still access the object.
    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
    ///
    /// drop(strong);
    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
    /// // undefined behavior.
    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
    /// ```
    ///
    /// [`null`]: ../../std/ptr/fn.null.html
    #[unstable(feature = "weak_into_raw", issue = "60728")]
    pub fn as_ptr(&self) -> *const T {
        let offset = data_offset_sized::<T>();
        let ptr = self.ptr.cast::<u8>().as_ptr().wrapping_offset(offset);
        ptr as *const T
    }

    /// Consumes the `Weak<T>` and turns it into a raw pointer.
    ///
    /// This converts the weak pointer into a raw pointer, preserving the original weak count. It
    /// can be turned back into the `Weak<T>` with [`from_raw`].
    ///
    /// The same restrictions on accessing the target of the pointer as with
    /// [`as_ptr`] apply.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(weak_into_raw)]
    ///
    /// use std::sync::{Arc, Weak};
    ///
    /// let strong = Arc::new("hello".to_owned());
    /// let weak = Arc::downgrade(&strong);
    /// let raw = weak.into_raw();
    ///
    /// assert_eq!(1, Arc::weak_count(&strong));
    /// assert_eq!("hello", unsafe { &*raw });
    ///
    /// drop(unsafe { Weak::from_raw(raw) });
    /// assert_eq!(0, Arc::weak_count(&strong));
    /// ```
    ///
    /// [`from_raw`]: struct.Weak.html#method.from_raw
    /// [`as_ptr`]: struct.Weak.html#method.as_ptr
    #[unstable(feature = "weak_into_raw", issue = "60728")]
    pub fn into_raw(self) -> *const T {
        let result = self.as_ptr();
        mem::forget(self);
        result
    }

    /// Converts a raw pointer previously created by [`into_raw`] back into
    /// `Weak<T>`.
    ///
    /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
    ///
    /// It takes ownership of one weak count (with the exception of pointers created by [`new`],
    /// as these don't have any corresponding weak count).
    ///
    /// # Safety
    ///
    /// The pointer must have originated from [`into_raw`] and must still own its potential
    /// weak reference count.
    ///
    /// It is allowed for the strong count to be 0 at the time of calling this, but the weak count
    /// must be non-zero or the pointer must have originated from a dangling `Weak<T>` (one created
    /// by [`new`]).
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(weak_into_raw)]
    ///
    /// use std::sync::{Arc, Weak};
    ///
    /// let strong = Arc::new("hello".to_owned());
    ///
    /// let raw_1 = Arc::downgrade(&strong).into_raw();
    /// let raw_2 = Arc::downgrade(&strong).into_raw();
    ///
    /// assert_eq!(2, Arc::weak_count(&strong));
    ///
    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
    /// assert_eq!(1, Arc::weak_count(&strong));
    ///
    /// drop(strong);
    ///
    /// // Decrement the last weak count.
    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
    /// ```
    ///
    /// [`new`]: struct.Weak.html#method.new
    /// [`into_raw`]: struct.Weak.html#method.into_raw
    /// [`upgrade`]: struct.Weak.html#method.upgrade
    /// [`Weak`]: struct.Weak.html
    /// [`Arc`]: struct.Arc.html
    /// [`forget`]: ../../std/mem/fn.forget.html
    #[unstable(feature = "weak_into_raw", issue = "60728")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        if ptr.is_null() {
            Self::new()
        } else {
            // See Arc::from_raw for details
            let offset = data_offset(ptr);
            let fake_ptr = ptr as *mut ArcInner<T>;
            let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
            Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
        }
    }
}

62682a34 1495impl<T: ?Sized> Weak<T> {
e74abb32
XL
1496 /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
1497 /// dropping of the inner value if successful.
1a4d82fc 1498 ///
e74abb32 1499 /// Returns [`None`] if the inner value has since been dropped.
1a4d82fc 1500 ///
cc61c64b
XL
1501 /// [`Arc`]: struct.Arc.html
1502 /// [`None`]: ../../std/option/enum.Option.html#variant.None
1a4d82fc
JJ
1503 ///
1504 /// # Examples
1505 ///
1506 /// ```
1507 /// use std::sync::Arc;
1508 ///
85aaf69f 1509 /// let five = Arc::new(5);
1a4d82fc 1510 ///
e9174d1e 1511 /// let weak_five = Arc::downgrade(&five);
1a4d82fc
JJ
1512 ///
1513 /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
c30ab7b3
SL
1514 /// assert!(strong_five.is_some());
1515 ///
1516 /// // Destroy all strong pointers.
1517 /// drop(strong_five);
1518 /// drop(five);
1519 ///
1520 /// assert!(weak_five.upgrade().is_none());
1a4d82fc 1521 /// ```
e9174d1e 1522 #[stable(feature = "arc_weak", since = "1.4.0")]
1a4d82fc 1523 pub fn upgrade(&self) -> Option<Arc<T>> {
c34b1796 1524 // We use a CAS loop to increment the strong count instead of a fetch_add, because
9346a6ac 1525 // once the count hits 0 it must never rise above 0 again (see the sketch below).
8faf50e0 1526 let inner = self.inner()?;
54a0048b
SL
1527
1528 // Relaxed load because any write of 0 that we can observe
1529 // leaves the field in a permanently zero state (so a
1530 // "stale" read of 0 is fine), and any other value is
1531 // confirmed via the CAS below.
1532 let mut n = inner.strong.load(Relaxed);
1533
1a4d82fc 1534 loop {
b039eaaf 1535 if n == 0 {
92a42be0
SL
1536 return None;
1537 }
1538
1539 // See comments in `Arc::clone` for why we do this (for `mem::forget`).
1540 if n > MAX_REFCOUNT {
3157f602
XL
1541 unsafe {
1542 abort();
1543 }
b039eaaf 1544 }
c1a9b12d
SL
1545
1546 // Relaxed is valid for the same reason it is on Arc's Clone impl
54a0048b 1547 match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
416331ca 1548 Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
54a0048b 1549 Err(old) => n = old,
b039eaaf 1550 }
1a4d82fc
JJ
1551 }
1552 }
1553
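 // A minimal stand-alone sketch of the "increment only if non-zero" pattern
 // used above (`try_increment` is a hypothetical helper, not a std API).
 // A plain `fetch_add` could briefly revive a count that has already reached
 // zero, which is exactly what the CAS loop rules out:
 //
 //     fn try_increment(count: &std::sync::atomic::AtomicUsize) -> bool {
 //         use std::sync::atomic::Ordering::Relaxed;
 //         let mut n = count.load(Relaxed);
 //         loop {
 //             if n == 0 {
 //                 return false; // the value is gone; do not revive it
 //             }
 //             match count.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
 //                 Ok(_) => return true,
 //                 Err(old) => n = old, // lost a race; retry with the new value
 //             }
 //         }
 //     }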
e74abb32 1554 /// Gets the number of strong (`Arc`) pointers pointing to this allocation.
9fa01778
XL
1555 ///
1556 /// If `self` was created using [`Weak::new`], this will return 0.
1557 ///
1558 /// [`Weak::new`]: #method.new
60c5eb7d 1559 #[stable(feature = "weak_counts", since = "1.41.0")]
9fa01778 1560 pub fn strong_count(&self) -> usize {
dfeec247 1561 if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 }
9fa01778
XL
1562 }
1563
1564 /// Gets an approximation of the number of `Weak` pointers pointing to this
e74abb32 1565 /// allocation.
9fa01778 1566 ///
60c5eb7d
XL
1567 /// If `self` was created using [`Weak::new`], or if there are no remaining
1568 /// strong pointers, this will return 0.
9fa01778
XL
1569 ///
1570 /// # Accuracy
1571 ///
1572 /// Due to implementation details, the returned value can be off by 1 in
1573 /// either direction when other threads are manipulating any `Arc`s or
e74abb32 1574 /// `Weak`s pointing to the same allocation.
9fa01778
XL
1575 ///
1576 /// [`Weak::new`]: #method.new
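 ///
 /// # Examples
 ///
 /// An illustration of the bookkeeping described above (single-threaded,
 /// so the counts are exact):
 ///
 /// ```
 /// use std::sync::Arc;
 ///
 /// let five = Arc::new(5);
 /// let weak_five = Arc::downgrade(&five);
 ///
 /// // One `Weak`, and the strong reference is still alive.
 /// assert_eq!(1, weak_five.weak_count());
 /// assert_eq!(1, weak_five.strong_count());
 ///
 /// drop(five);
 /// // With no strong pointers left, both counts read as 0.
 /// assert_eq!(0, weak_five.weak_count());
 /// assert_eq!(0, weak_five.strong_count());
 /// ```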
60c5eb7d
XL
1577 #[stable(feature = "weak_counts", since = "1.41.0")]
1578 pub fn weak_count(&self) -> usize {
dfeec247
XL
1579 self.inner()
1580 .map(|inner| {
1581 let weak = inner.weak.load(SeqCst);
1582 let strong = inner.strong.load(SeqCst);
1583 if strong == 0 {
1584 0
1585 } else {
1586 // Since we observed that there was at least one strong pointer
1587 // after reading the weak count, we know that the implicit weak
1588 // reference (present whenever any strong references are alive)
1589 // was still around when we observed the weak count, and can
1590 // therefore safely subtract it.
1591 weak - 1
1592 }
1593 })
1594 .unwrap_or(0)
9fa01778
XL
1595 }
1596
 1597 /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`
1598 /// (i.e., when this `Weak` was created by `Weak::new`).
1a4d82fc 1599 #[inline]
8faf50e0 1600 fn inner(&self) -> Option<&ArcInner<T>> {
dfeec247 1601 if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
1a4d82fc 1602 }
0731742a 1603
e74abb32
XL
1604 /// Returns `true` if the two `Weak`s point to the same allocation (similar to
1605 /// [`ptr::eq`]), or if both don't point to any allocation
e1599b0c 1606 /// (because they were created with `Weak::new()`).
0731742a
XL
1607 ///
1608 /// # Notes
1609 ///
 1610 /// Since this compares pointers, two `Weak`s created by `Weak::new()` will compare
e74abb32 1611 /// equal to each other, even though they don't point to any allocation.
0731742a 1612 ///
0731742a
XL
1613 /// # Examples
1614 ///
1615 /// ```
dc9dc135 1616 /// use std::sync::Arc;
0731742a
XL
1617 ///
1618 /// let first_rc = Arc::new(5);
1619 /// let first = Arc::downgrade(&first_rc);
1620 /// let second = Arc::downgrade(&first_rc);
1621 ///
dc9dc135 1622 /// assert!(first.ptr_eq(&second));
0731742a
XL
1623 ///
1624 /// let third_rc = Arc::new(5);
1625 /// let third = Arc::downgrade(&third_rc);
1626 ///
dc9dc135 1627 /// assert!(!first.ptr_eq(&third));
0731742a
XL
1628 /// ```
1629 ///
1630 /// Comparing `Weak::new`.
1631 ///
1632 /// ```
0731742a
XL
1633 /// use std::sync::{Arc, Weak};
1634 ///
1635 /// let first = Weak::new();
1636 /// let second = Weak::new();
dc9dc135 1637 /// assert!(first.ptr_eq(&second));
0731742a
XL
1638 ///
1639 /// let third_rc = Arc::new(());
1640 /// let third = Arc::downgrade(&third_rc);
dc9dc135 1641 /// assert!(!first.ptr_eq(&third));
0731742a 1642 /// ```
e74abb32
XL
1643 ///
1644 /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
0731742a 1645 #[inline]
e1599b0c 1646 #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
dc9dc135
XL
1647 pub fn ptr_eq(&self, other: &Self) -> bool {
1648 self.ptr.as_ptr() == other.ptr.as_ptr()
0731742a 1649 }
1a4d82fc
JJ
1650}
1651
e9174d1e 1652#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 1653impl<T: ?Sized> Clone for Weak<T> {
e74abb32 1654 /// Makes a clone of the `Weak` pointer that points to the same allocation.
1a4d82fc
JJ
1655 ///
1656 /// # Examples
1657 ///
1658 /// ```
7cac9316 1659 /// use std::sync::{Arc, Weak};
1a4d82fc 1660 ///
e9174d1e 1661 /// let weak_five = Arc::downgrade(&Arc::new(5));
1a4d82fc 1662 ///
0bf4aa26 1663 /// let _ = Weak::clone(&weak_five);
1a4d82fc
JJ
1664 /// ```
1665 #[inline]
1666 fn clone(&self) -> Weak<T> {
8faf50e0
XL
1667 let inner = if let Some(inner) = self.inner() {
1668 inner
1669 } else {
1670 return Weak { ptr: self.ptr };
1671 };
c1a9b12d
SL
 1672 // See comments in Arc::clone() for why this is relaxed. This can use a
 1673 // fetch_add (ignoring the lock) because the weak count is only locked
 1674 // when there are *no other* weak pointers in existence. (So we can't be
 1675 // running this code in that case.)
8faf50e0 1676 let old_size = inner.weak.fetch_add(1, Relaxed);
c1a9b12d
SL
1677
1678 // See comments in Arc::clone() for why we do this (for mem::forget).
1679 if old_size > MAX_REFCOUNT {
b039eaaf
SL
1680 unsafe {
1681 abort();
1682 }
c1a9b12d
SL
1683 }
1684
e74abb32 1685 Weak { ptr: self.ptr }
1a4d82fc
JJ
1686 }
1687}
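 // A stand-alone sketch of the overflow guard used by `clone` above
 // (`guarded_increment` is a hypothetical helper, not a std API). Leaking
 // clones, e.g. via `mem::forget`, can pump the counter without ever running
 // `Drop`, so after the relaxed increment we abort once the old value has
 // crossed the `isize::MAX` soft limit, long before the counter could wrap:
 //
 //     fn guarded_increment(count: &std::sync::atomic::AtomicUsize) {
 //         use std::sync::atomic::Ordering::Relaxed;
 //         let old = count.fetch_add(1, Relaxed);
 //         if old > isize::MAX as usize {
 //             std::process::abort(); // stand-in for `intrinsics::abort`
 //         }
 //     }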
1688
a7813a04
XL
1689#[stable(feature = "downgraded_weak", since = "1.10.0")]
1690impl<T> Default for Weak<T> {
8faf50e0 1691 /// Constructs a new `Weak<T>`, without allocating memory.
0731742a 1692 /// Calling [`upgrade`] on the return value always
b7449926 1693 /// gives [`None`].
c30ab7b3 1694 ///
cc61c64b 1695 /// [`None`]: ../../std/option/enum.Option.html#variant.None
0731742a 1696 /// [`upgrade`]: ../../std/sync/struct.Weak.html#method.upgrade
c30ab7b3
SL
1697 ///
1698 /// # Examples
1699 ///
1700 /// ```
1701 /// use std::sync::Weak;
1702 ///
1703 /// let empty: Weak<i64> = Default::default();
1704 /// assert!(empty.upgrade().is_none());
1705 /// ```
a7813a04
XL
1706 fn default() -> Weak<T> {
1707 Weak::new()
1708 }
1709}
1710
7453a54e 1711#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 1712impl<T: ?Sized> Drop for Weak<T> {
c30ab7b3 1713 /// Drops the `Weak` pointer.
1a4d82fc 1714 ///
1a4d82fc
JJ
1715 /// # Examples
1716 ///
1717 /// ```
7cac9316 1718 /// use std::sync::{Arc, Weak};
1a4d82fc 1719 ///
c30ab7b3 1720 /// struct Foo;
1a4d82fc 1721 ///
c30ab7b3
SL
1722 /// impl Drop for Foo {
1723 /// fn drop(&mut self) {
1724 /// println!("dropped!");
1725 /// }
1a4d82fc 1726 /// }
1a4d82fc 1727 ///
c30ab7b3
SL
1728 /// let foo = Arc::new(Foo);
1729 /// let weak_foo = Arc::downgrade(&foo);
7cac9316 1730 /// let other_weak_foo = Weak::clone(&weak_foo);
1a4d82fc 1731 ///
c30ab7b3
SL
1732 /// drop(weak_foo); // Doesn't print anything
1733 /// drop(foo); // Prints "dropped!"
1734 ///
1735 /// assert!(other_weak_foo.upgrade().is_none());
1a4d82fc
JJ
1736 /// ```
1737 fn drop(&mut self) {
c34b1796
AL
 1738 // If we find out that we were the last weak pointer, then it's time to
 1739 // deallocate the data entirely. See the discussion in Arc::drop() about
 1740 // the memory orderings. (A stand-alone sketch of that shape follows this impl.)
c1a9b12d
SL
1741 //
1742 // It's not necessary to check for the locked state here, because the
1743 // weak count can only be locked if there was precisely one weak ref,
1744 // meaning that drop could only subsequently run ON that remaining weak
1745 // ref, which can only happen after the lock is released.
dfeec247 1746 let inner = if let Some(inner) = self.inner() { inner } else { return };
8faf50e0
XL
1747
1748 if inner.weak.fetch_sub(1, Release) == 1 {
ba9703b0 1749 acquire!(inner.weak);
dfeec247 1750 unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
1a4d82fc
JJ
1751 }
1752 }
1753}
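 // A stand-alone sketch of the decrement-then-fence shape used by `drop`
 // above (`release_ref` is a hypothetical helper, not a std API):
 //
 //     use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Release}};
 //
 //     fn release_ref(count: &AtomicUsize, dealloc: impl FnOnce()) {
 //         // `Release` publishes this thread's prior uses of the data to
 //         // whichever thread ends up performing the final decrement...
 //         if count.fetch_sub(1, Release) == 1 {
 //             // ...and the `Acquire` fence synchronizes with all of those
 //             // `Release` decrements before the memory is freed.
 //             fence(Acquire);
 //             dealloc();
 //         }
 //     }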
1754
0731742a
XL
1755#[stable(feature = "rust1", since = "1.0.0")]
1756trait ArcEqIdent<T: ?Sized + PartialEq> {
1757 fn eq(&self, other: &Arc<T>) -> bool;
1758 fn ne(&self, other: &Arc<T>) -> bool;
1759}
1760
1761#[stable(feature = "rust1", since = "1.0.0")]
1762impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
1763 #[inline]
1764 default fn eq(&self, other: &Arc<T>) -> bool {
1765 **self == **other
1766 }
1767 #[inline]
1768 default fn ne(&self, other: &Arc<T>) -> bool {
1769 **self != **other
1770 }
1771}
1772
48663c56
XL
 1773 /// We're doing this specialization here, and not as a more general optimization on `&T`, because it
 1774 /// would otherwise add a cost to all equality checks on refs. We assume that `Arc`s are used to
 1775 /// store large values that are slow to clone but also expensive to check for equality, so this
 1776 /// cost pays off more easily. It's also more likely to have two `Arc` clones that point to
 1777 /// the same value than two `&T`s.
e74abb32
XL
1778///
1779/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
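 ///
 /// For instance, `f64` implements `PartialEq` but not `Eq`, because
 /// `NAN != NAN`; an `Arc<f64>` must therefore never take the pointer
 /// shortcut (illustrative example, not part of the original docs):
 ///
 /// ```
 /// use std::sync::Arc;
 ///
 /// let nan = Arc::new(std::f64::NAN);
 /// // `nan.clone()` shares the same allocation, yet `==` still reports
 /// // inequality, as IEEE 754 requires.
 /// assert!(nan != nan.clone());
 /// ```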
0731742a
XL
1780#[stable(feature = "rust1", since = "1.0.0")]
1781impl<T: ?Sized + Eq> ArcEqIdent<T> for Arc<T> {
1782 #[inline]
1783 fn eq(&self, other: &Arc<T>) -> bool {
1784 Arc::ptr_eq(self, other) || **self == **other
1785 }
1786
1787 #[inline]
1788 fn ne(&self, other: &Arc<T>) -> bool {
1789 !Arc::ptr_eq(self, other) && **self != **other
1790 }
1791}
1792
85aaf69f 1793#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1794impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
c30ab7b3 1795 /// Equality for two `Arc`s.
1a4d82fc 1796 ///
e74abb32
XL
1797 /// Two `Arc`s are equal if their inner values are equal, even if they are
 1798 /// stored in different allocations.
1a4d82fc 1799 ///
e74abb32
XL
1800 /// If `T` also implements `Eq` (implying reflexivity of equality),
1801 /// two `Arc`s that point to the same allocation are always equal.
0731742a 1802 ///
1a4d82fc
JJ
1803 /// # Examples
1804 ///
1805 /// ```
1806 /// use std::sync::Arc;
1807 ///
85aaf69f 1808 /// let five = Arc::new(5);
1a4d82fc 1809 ///
c30ab7b3 1810 /// assert!(five == Arc::new(5));
1a4d82fc 1811 /// ```
0731742a 1812 #[inline]
b039eaaf 1813 fn eq(&self, other: &Arc<T>) -> bool {
0731742a 1814 ArcEqIdent::eq(self, other)
b039eaaf 1815 }
1a4d82fc 1816
c30ab7b3 1817 /// Inequality for two `Arc`s.
1a4d82fc 1818 ///
c30ab7b3 1819 /// Two `Arc`s are unequal if their inner values are unequal.
1a4d82fc 1820 ///
e74abb32
XL
1821 /// If `T` also implements `Eq` (implying reflexivity of equality),
 1822 /// two `Arc`s that point to the same allocation are never unequal.
0731742a 1823 ///
1a4d82fc
JJ
1824 /// # Examples
1825 ///
1826 /// ```
1827 /// use std::sync::Arc;
1828 ///
85aaf69f 1829 /// let five = Arc::new(5);
1a4d82fc 1830 ///
c30ab7b3 1831 /// assert!(five != Arc::new(6));
1a4d82fc 1832 /// ```
0731742a 1833 #[inline]
b039eaaf 1834 fn ne(&self, other: &Arc<T>) -> bool {
0731742a 1835 ArcEqIdent::ne(self, other)
b039eaaf 1836 }
1a4d82fc 1837}
0731742a 1838
85aaf69f 1839#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1840impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
c30ab7b3 1841 /// Partial comparison for two `Arc`s.
1a4d82fc
JJ
1842 ///
1843 /// The two are compared by calling `partial_cmp()` on their inner values.
1844 ///
1845 /// # Examples
1846 ///
1847 /// ```
1848 /// use std::sync::Arc;
c30ab7b3 1849 /// use std::cmp::Ordering;
1a4d82fc 1850 ///
85aaf69f 1851 /// let five = Arc::new(5);
1a4d82fc 1852 ///
c30ab7b3 1853 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
1a4d82fc
JJ
1854 /// ```
1855 fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
1856 (**self).partial_cmp(&**other)
1857 }
1858
c30ab7b3 1859 /// Less-than comparison for two `Arc`s.
1a4d82fc
JJ
1860 ///
1861 /// The two are compared by calling `<` on their inner values.
1862 ///
1863 /// # Examples
1864 ///
1865 /// ```
1866 /// use std::sync::Arc;
1867 ///
85aaf69f 1868 /// let five = Arc::new(5);
1a4d82fc 1869 ///
c30ab7b3 1870 /// assert!(five < Arc::new(6));
1a4d82fc 1871 /// ```
b039eaaf
SL
1872 fn lt(&self, other: &Arc<T>) -> bool {
1873 *(*self) < *(*other)
1874 }
1a4d82fc 1875
c30ab7b3 1876 /// 'Less than or equal to' comparison for two `Arc`s.
1a4d82fc
JJ
1877 ///
1878 /// The two are compared by calling `<=` on their inner values.
1879 ///
1880 /// # Examples
1881 ///
1882 /// ```
1883 /// use std::sync::Arc;
1884 ///
85aaf69f 1885 /// let five = Arc::new(5);
1a4d82fc 1886 ///
c30ab7b3 1887 /// assert!(five <= Arc::new(5));
1a4d82fc 1888 /// ```
b039eaaf
SL
1889 fn le(&self, other: &Arc<T>) -> bool {
1890 *(*self) <= *(*other)
1891 }
1a4d82fc 1892
c30ab7b3 1893 /// Greater-than comparison for two `Arc`s.
1a4d82fc
JJ
1894 ///
1895 /// The two are compared by calling `>` on their inner values.
1896 ///
1897 /// # Examples
1898 ///
1899 /// ```
1900 /// use std::sync::Arc;
1901 ///
85aaf69f 1902 /// let five = Arc::new(5);
1a4d82fc 1903 ///
c30ab7b3 1904 /// assert!(five > Arc::new(4));
1a4d82fc 1905 /// ```
b039eaaf
SL
1906 fn gt(&self, other: &Arc<T>) -> bool {
1907 *(*self) > *(*other)
1908 }
1a4d82fc 1909
c30ab7b3 1910 /// 'Greater than or equal to' comparison for two `Arc`s.
1a4d82fc
JJ
1911 ///
1912 /// The two are compared by calling `>=` on their inner values.
1913 ///
1914 /// # Examples
1915 ///
1916 /// ```
1917 /// use std::sync::Arc;
1918 ///
85aaf69f 1919 /// let five = Arc::new(5);
1a4d82fc 1920 ///
c30ab7b3 1921 /// assert!(five >= Arc::new(5));
1a4d82fc 1922 /// ```
b039eaaf
SL
1923 fn ge(&self, other: &Arc<T>) -> bool {
1924 *(*self) >= *(*other)
1925 }
1a4d82fc 1926}
85aaf69f 1927#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1928impl<T: ?Sized + Ord> Ord for Arc<T> {
c30ab7b3
SL
1929 /// Comparison for two `Arc`s.
1930 ///
1931 /// The two are compared by calling `cmp()` on their inner values.
1932 ///
1933 /// # Examples
1934 ///
1935 /// ```
1936 /// use std::sync::Arc;
1937 /// use std::cmp::Ordering;
1938 ///
1939 /// let five = Arc::new(5);
1940 ///
1941 /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
1942 /// ```
b039eaaf
SL
1943 fn cmp(&self, other: &Arc<T>) -> Ordering {
1944 (**self).cmp(&**other)
1945 }
1a4d82fc 1946}
85aaf69f 1947#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1948impl<T: ?Sized + Eq> Eq for Arc<T> {}
1a4d82fc 1949
85aaf69f 1950#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1951impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
9fa01778 1952 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
85aaf69f 1953 fmt::Display::fmt(&**self, f)
1a4d82fc
JJ
1954 }
1955}
1956
85aaf69f 1957#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1958impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
9fa01778 1959 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
85aaf69f 1960 fmt::Debug::fmt(&**self, f)
1a4d82fc
JJ
1961 }
1962}
1963
9346a6ac 1964#[stable(feature = "rust1", since = "1.0.0")]
7453a54e 1965impl<T: ?Sized> fmt::Pointer for Arc<T> {
9fa01778 1966 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
ff7c6d11 1967 fmt::Pointer::fmt(&(&**self as *const T), f)
9346a6ac
AL
1968 }
1969}
1970
85aaf69f 1971#[stable(feature = "rust1", since = "1.0.0")]
d9579d0f 1972impl<T: Default> Default for Arc<T> {
c30ab7b3
SL
1973 /// Creates a new `Arc<T>`, with the `Default` value for `T`.
1974 ///
1975 /// # Examples
1976 ///
1977 /// ```
1978 /// use std::sync::Arc;
1979 ///
1980 /// let x: Arc<i32> = Default::default();
1981 /// assert_eq!(*x, 0);
1982 /// ```
b039eaaf
SL
1983 fn default() -> Arc<T> {
1984 Arc::new(Default::default())
1985 }
1a4d82fc
JJ
1986}
1987
85aaf69f 1988#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1989impl<T: ?Sized + Hash> Hash for Arc<T> {
85aaf69f
SL
1990 fn hash<H: Hasher>(&self, state: &mut H) {
1991 (**self).hash(state)
1992 }
1993}
1a4d82fc 1994
92a42be0
SL
1995#[stable(feature = "from_for_ptrs", since = "1.6.0")]
1996impl<T> From<T> for Arc<T> {
1997 fn from(t: T) -> Self {
1998 Arc::new(t)
1999 }
2000}
2001
3b2f2976 2002#[stable(feature = "shared_from_slice", since = "1.21.0")]
9fa01778 2003impl<T: Clone> From<&[T]> for Arc<[T]> {
3b2f2976
XL
2004 #[inline]
2005 fn from(v: &[T]) -> Arc<[T]> {
2006 <Self as ArcFromSlice<T>>::from_slice(v)
2007 }
2008}
2009
2010#[stable(feature = "shared_from_slice", since = "1.21.0")]
9fa01778 2011impl From<&str> for Arc<str> {
3b2f2976
XL
2012 #[inline]
2013 fn from(v: &str) -> Arc<str> {
ff7c6d11
XL
2014 let arc = Arc::<[u8]>::from(v.as_bytes());
2015 unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
3b2f2976
XL
2016 }
2017}
2018
2019#[stable(feature = "shared_from_slice", since = "1.21.0")]
2020impl From<String> for Arc<str> {
2021 #[inline]
2022 fn from(v: String) -> Arc<str> {
2023 Arc::from(&v[..])
2024 }
2025}
2026
2027#[stable(feature = "shared_from_slice", since = "1.21.0")]
2028impl<T: ?Sized> From<Box<T>> for Arc<T> {
2029 #[inline]
2030 fn from(v: Box<T>) -> Arc<T> {
2031 Arc::from_box(v)
2032 }
2033}
2034
2035#[stable(feature = "shared_from_slice", since = "1.21.0")]
2036impl<T> From<Vec<T>> for Arc<[T]> {
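 /// Moves the elements of `v` into a freshly allocated shared slice; the
 /// `Vec`'s buffer is freed, but its elements are transplanted rather than
 /// dropped (illustrative example):
 ///
 /// ```
 /// use std::sync::Arc;
 ///
 /// let v = vec![String::from("a"), String::from("b")];
 /// let shared: Arc<[String]> = Arc::from(v);
 /// assert_eq!(shared.len(), 2);
 /// assert_eq!(&*shared[0], "a");
 /// ```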
2037 #[inline]
2038 fn from(mut v: Vec<T>) -> Arc<[T]> {
2039 unsafe {
2040 let arc = Arc::copy_from_slice(&v);
2041
2042 // Allow the Vec to free its memory, but not destroy its contents
2043 v.set_len(0);
2044
2045 arc
2046 }
2047 }
2048}
2049
74b04a01 2050#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
416331ca
XL
2051impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]>
2052where
2053 [T; N]: LengthAtMost32,
2054{
2055 type Error = Arc<[T]>;
1a4d82fc 2056
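 /// Returns the shared slice as a shared array if the length matches `N`,
 /// or hands the slice back unchanged otherwise (illustrative example):
 ///
 /// ```
 /// use std::convert::TryFrom;
 /// use std::sync::Arc;
 ///
 /// let slice: Arc<[i32]> = Arc::from(vec![1, 2, 3]);
 /// let array = <Arc<[i32; 3]>>::try_from(slice).unwrap();
 /// assert_eq!(*array, [1, 2, 3]);
 /// ```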
416331ca
XL
2057 fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> {
2058 if boxed_slice.len() == N {
2059 Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) })
2060 } else {
2061 Err(boxed_slice)
3b2f2976 2062 }
3b2f2976 2063 }
416331ca 2064}
3b2f2976 2065
416331ca
XL
2066#[stable(feature = "shared_from_iter", since = "1.37.0")]
2067impl<T> iter::FromIterator<T> for Arc<[T]> {
2068 /// Takes each element in the `Iterator` and collects it into an `Arc<[T]>`.
2069 ///
2070 /// # Performance characteristics
2071 ///
2072 /// ## The general case
2073 ///
2074 /// In the general case, collecting into `Arc<[T]>` is done by first
2075 /// collecting into a `Vec<T>`. That is, when writing the following:
2076 ///
2077 /// ```rust
2078 /// # use std::sync::Arc;
2079 /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2080 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2081 /// ```
2082 ///
2083 /// this behaves as if we wrote:
2084 ///
2085 /// ```rust
2086 /// # use std::sync::Arc;
2087 /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2088 /// .collect::<Vec<_>>() // The first set of allocations happens here.
2089 /// .into(); // A second allocation for `Arc<[T]>` happens here.
2090 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2091 /// ```
2092 ///
2093 /// This will allocate as many times as needed for constructing the `Vec<T>`
2094 /// and then it will allocate once for turning the `Vec<T>` into the `Arc<[T]>`.
2095 ///
2096 /// ## Iterators of known length
2097 ///
2098 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2099 /// a single allocation will be made for the `Arc<[T]>`. For example:
2100 ///
2101 /// ```rust
2102 /// # use std::sync::Arc;
2103 /// let evens: Arc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2104 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2105 /// ```
2106 fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
2107 ArcFromIter::from_iter(iter.into_iter())
3b2f2976 2108 }
416331ca 2109}
3b2f2976 2110
416331ca
XL
2111/// Specialization trait used for collecting into `Arc<[T]>`.
2112trait ArcFromIter<T, I> {
2113 fn from_iter(iter: I) -> Self;
2114}
3b2f2976 2115
416331ca
XL
2116impl<T, I: Iterator<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
2117 default fn from_iter(iter: I) -> Self {
2118 iter.collect::<Vec<T>>().into()
3b2f2976 2119 }
416331ca 2120}
3b2f2976 2121
416331ca
XL
2122impl<T, I: iter::TrustedLen<Item = T>> ArcFromIter<T, I> for Arc<[T]> {
2123 default fn from_iter(iter: I) -> Self {
2124 // This is the case for a `TrustedLen` iterator.
2125 let (low, high) = iter.size_hint();
2126 if let Some(high) = high {
2127 debug_assert_eq!(
dfeec247
XL
2128 low,
2129 high,
416331ca
XL
2130 "TrustedLen iterator's size hint is not exact: {:?}",
2131 (low, high)
2132 );
3b2f2976 2133
416331ca
XL
2134 unsafe {
 2135 // SAFETY: we need an exact length, and `TrustedLen` guarantees one here: the upper bound is `Some` and equals the lower bound.
2136 Arc::from_iter_exact(iter, low)
3b2f2976 2137 }
416331ca
XL
2138 } else {
2139 // Fall back to normal implementation.
2140 iter.collect::<Vec<T>>().into()
3b2f2976 2141 }
3b2f2976 2142 }
416331ca 2143}
3b2f2976 2144
416331ca
XL
2145impl<'a, T: 'a + Clone> ArcFromIter<&'a T, slice::Iter<'a, T>> for Arc<[T]> {
2146 fn from_iter(iter: slice::Iter<'a, T>) -> Self {
2147 // Delegate to `impl<T: Clone> From<&[T]> for Arc<[T]>`.
2148 //
2149 // In the case that `T: Copy`, we get to use `ptr::copy_nonoverlapping`
2150 // which is even more performant.
2151 //
2152 // In the fall-back case we have `T: Clone`. This is still better
2153 // than the `TrustedLen` implementation as slices have a known length
2154 // and so we get to avoid calling `size_hint` and avoid the branching.
2155 iter.as_slice().into()
94b46f34 2156 }
1a4d82fc 2157}
e9174d1e 2158
92a42be0 2159#[stable(feature = "rust1", since = "1.0.0")]
e9174d1e 2160impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
b039eaaf
SL
2161 fn borrow(&self) -> &T {
2162 &**self
2163 }
2164}
2165
2166#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
2167impl<T: ?Sized> AsRef<T> for Arc<T> {
2168 fn as_ref(&self) -> &T {
2169 &**self
2170 }
e9174d1e 2171}
b7449926 2172
0731742a 2173#[stable(feature = "pin", since = "1.33.0")]
dfeec247 2174impl<T: ?Sized> Unpin for Arc<T> {}
dc9dc135 2175
416331ca 2176/// Computes the offset of the data field within `ArcInner`.
dc9dc135 2177unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
416331ca
XL
2178 // Align the unsized value to the end of the `ArcInner`.
2179 // Because it is `?Sized`, it will always be the last field in memory.
dfeec247
XL
2180 // Note: This is a detail of the current implementation of the compiler,
2181 // and is not a guaranteed language detail. Do not rely on it outside of std.
416331ca 2182 data_offset_align(align_of_val(&*ptr))
dc9dc135
XL
2183}
2184
416331ca 2185/// Computes the offset of the data field within `ArcInner`.
dc9dc135
XL
2186///
2187/// Unlike [`data_offset`], this doesn't need the pointer, but it works only on `T: Sized`.
2188fn data_offset_sized<T>() -> isize {
416331ca
XL
2189 data_offset_align(align_of::<T>())
2190}
2191
2192#[inline]
2193fn data_offset_align(align: usize) -> isize {
dc9dc135
XL
2194 let layout = Layout::new::<ArcInner<()>>();
2195 (layout.size() + layout.padding_needed_for(align)) as isize
2196}