]> git.proxmox.com Git - rustc.git/blame - src/liballoc/arc.rs
New upstream version 1.28.0~beta.14+dfsg1
[rustc.git] / src / liballoc / arc.rs
CommitLineData
1a4d82fc
JJ
1// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
85aaf69f 11#![stable(feature = "rust1", since = "1.0.0")]
1a4d82fc 12
c30ab7b3 13//! Thread-safe reference-counting pointers.
1a4d82fc 14//!
c30ab7b3 15//! See the [`Arc<T>`][arc] documentation for more details.
1a4d82fc 16//!
c30ab7b3 17//! [arc]: struct.Arc.html
1a4d82fc 18
94b46f34 19use core::any::Any;
e9174d1e 20use core::sync::atomic;
3157f602 21use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
e9174d1e 22use core::borrow;
85aaf69f 23use core::fmt;
c34b1796 24use core::cmp::Ordering;
92a42be0 25use core::intrinsics::abort;
ea8adc8c 26use core::mem::{self, align_of_val, size_of_val, uninitialized};
92a42be0 27use core::ops::Deref;
92a42be0 28use core::ops::CoerceUnsized;
2c00a5a8 29use core::ptr::{self, NonNull};
ff7c6d11 30use core::marker::{Unsize, PhantomData};
1a4d82fc 31use core::hash::{Hash, Hasher};
3157f602 32use core::{isize, usize};
92a42be0 33use core::convert::From;
041b39d2 34
94b46f34 35use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
3b2f2976
XL
36use boxed::Box;
37use string::String;
38use vec::Vec;
1a4d82fc 39
c30ab7b3
SL
40/// A soft limit on the amount of references that may be made to an `Arc`.
41///
42/// Going above this limit will abort your program (although not
43/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
c1a9b12d
SL
44const MAX_REFCOUNT: usize = (isize::MAX) as usize;
45
041b39d2
XL
46/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
47/// Reference Counted'.
1a4d82fc 48///
c30ab7b3
SL
49/// The type `Arc<T>` provides shared ownership of a value of type `T`,
50/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
51/// a new pointer to the same value in the heap. When the last `Arc`
52/// pointer to a given value is destroyed, the pointed-to value is
53/// also destroyed.
1a4d82fc 54///
c30ab7b3 55/// Shared references in Rust disallow mutation by default, and `Arc` is no
ea8adc8c
XL
56/// exception: you cannot generally obtain a mutable reference to something
57/// inside an `Arc`. If you need to mutate through an `Arc`, use
58/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
59/// types.
9e0c209e 60///
7cac9316
XL
61/// ## Thread Safety
62///
63/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
83c7162d 64/// counting. This means that it is thread-safe. The disadvantage is that
7cac9316
XL
65/// atomic operations are more expensive than ordinary memory accesses. If you
66/// are not sharing reference-counted values between threads, consider using
67/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
68/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
69/// However, a library might choose `Arc<T>` in order to give library consumers
c30ab7b3 70/// more flexibility.
1a4d82fc 71///
7cac9316
XL
72/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
73/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
74/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
75/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
76/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
77/// data, but it doesn't add thread safety to its data. Consider
ea8adc8c
XL
78/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
79/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
80/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
7cac9316
XL
81/// non-atomic operations.
82///
83/// In the end, this means that you may need to pair `Arc<T>` with some sort of
ea8adc8c 84/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
7cac9316
XL
85///
86/// ## Breaking cycles with `Weak`
87///
c30ab7b3 88/// The [`downgrade`][downgrade] method can be used to create a non-owning
32a655c1
SL
89/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
90/// to an `Arc`, but this will return [`None`] if the value has already been
91/// dropped.
c30ab7b3
SL
92///
93/// A cycle between `Arc` pointers will never be deallocated. For this reason,
32a655c1
SL
94/// [`Weak`][weak] is used to break cycles. For example, a tree could have
95/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
96/// pointers from children back to their parents.
c30ab7b3 97///
7cac9316
XL
98/// # Cloning references
99///
100/// Creating a new reference from an existing reference counted pointer is done using the
3b2f2976 101/// `Clone` trait implemented for [`Arc<T>`][arc] and [`Weak<T>`][weak].
7cac9316
XL
102///
103/// ```
104/// use std::sync::Arc;
105/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
106/// // The two syntaxes below are equivalent.
107/// let a = foo.clone();
108/// let b = Arc::clone(&foo);
109/// // a and b both point to the same memory location as foo.
110/// ```
111///
ea8adc8c 112/// The [`Arc::clone(&from)`] syntax is the most idiomatic because it conveys more explicitly
7cac9316
XL
113/// the meaning of the code. In the example above, this syntax makes it easier to see that
114/// this code is creating a new reference rather than copying the whole content of foo.
115///
116/// ## `Deref` behavior
117///
c30ab7b3
SL
118/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
119/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
120/// clashes with `T`'s methods, the methods of `Arc<T>` itself are [associated
121/// functions][assoc], called using function-like syntax:
c34b1796
AL
122///
123/// ```
1a4d82fc 124/// use std::sync::Arc;
c30ab7b3 125/// let my_arc = Arc::new(());
1a4d82fc 126///
c30ab7b3
SL
127/// Arc::downgrade(&my_arc);
128/// ```
1a4d82fc 129///
32a655c1 130/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the value may have
c30ab7b3 131/// already been destroyed.
1a4d82fc 132///
c30ab7b3
SL
133/// [arc]: struct.Arc.html
134/// [weak]: struct.Weak.html
7cac9316 135/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
c30ab7b3
SL
136/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
137/// [mutex]: ../../std/sync/struct.Mutex.html
138/// [rwlock]: ../../std/sync/struct.RwLock.html
139/// [atomic]: ../../std/sync/atomic/index.html
32a655c1 140/// [`Send`]: ../../std/marker/trait.Send.html
7cac9316 141/// [`Sync`]: ../../std/marker/trait.Sync.html
c30ab7b3
SL
142/// [deref]: ../../std/ops/trait.Deref.html
143/// [downgrade]: struct.Arc.html#method.downgrade
144/// [upgrade]: struct.Weak.html#method.upgrade
32a655c1 145/// [`None`]: ../../std/option/enum.Option.html#variant.None
cc61c64b 146/// [assoc]: ../../book/first-edition/method-syntax.html#associated-functions
ea8adc8c
XL
147/// [`RefCell<T>`]: ../../std/cell/struct.RefCell.html
148/// [`std::sync`]: ../../std/sync/index.html
149/// [`Arc::clone(&from)`]: #method.clone
1a4d82fc 150///
c30ab7b3 151/// # Examples
5bcae85e 152///
c30ab7b3
SL
153/// Sharing some immutable data between threads:
154///
155// Note that we **do not** run these tests here. The windows builders get super
156// unhappy if a thread outlives the main thread and then exits at the same time
157// (something deadlocks) so we just avoid this entirely by not running these
158// tests.
5bcae85e 159/// ```no_run
c30ab7b3 160/// use std::sync::Arc;
5bcae85e
SL
161/// use std::thread;
162///
c30ab7b3 163/// let five = Arc::new(5);
5bcae85e
SL
164///
165/// for _ in 0..10 {
7cac9316 166/// let five = Arc::clone(&five);
5bcae85e
SL
167///
168/// thread::spawn(move || {
c30ab7b3
SL
169/// println!("{:?}", five);
170/// });
171/// }
172/// ```
5bcae85e 173///
32a655c1
SL
174/// Sharing a mutable [`AtomicUsize`]:
175///
176/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html
5bcae85e 177///
c30ab7b3
SL
178/// ```no_run
179/// use std::sync::Arc;
180/// use std::sync::atomic::{AtomicUsize, Ordering};
181/// use std::thread;
182///
183/// let val = Arc::new(AtomicUsize::new(5));
184///
185/// for _ in 0..10 {
7cac9316 186/// let val = Arc::clone(&val);
c30ab7b3
SL
187///
188/// thread::spawn(move || {
189/// let v = val.fetch_add(1, Ordering::SeqCst);
190/// println!("{:?}", v);
5bcae85e
SL
191/// });
192/// }
193/// ```
c30ab7b3
SL
194///
195/// See the [`rc` documentation][rc_examples] for more examples of reference
196/// counting in general.
197///
198/// [rc_examples]: ../../std/rc/index.html#examples
85aaf69f 199#[stable(feature = "rust1", since = "1.0.0")]
62682a34 200pub struct Arc<T: ?Sized> {
2c00a5a8 201 ptr: NonNull<ArcInner<T>>,
ff7c6d11 202 phantom: PhantomData<T>,
1a4d82fc
JJ
203}
204
92a42be0
SL
205#[stable(feature = "rust1", since = "1.0.0")]
206unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
207#[stable(feature = "rust1", since = "1.0.0")]
208unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
1a4d82fc 209
92a42be0 210#[unstable(feature = "coerce_unsized", issue = "27732")]
62682a34 211impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
1a4d82fc 212
cc61c64b
XL
213/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
214/// managed value. The value is accessed by calling [`upgrade`] on the `Weak`
215/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
1a4d82fc 216///
cc61c64b
XL
217/// Since a `Weak` reference does not count towards ownership, it will not
218/// prevent the inner value from being dropped, and `Weak` itself makes no
219/// guarantees about the value still being present and may return [`None`]
220/// when [`upgrade`]d.
5bcae85e 221///
cc61c64b
XL
222/// A `Weak` pointer is useful for keeping a temporary reference to the value
223/// within [`Arc`] without extending its lifetime. It is also used to prevent
224/// circular references between [`Arc`] pointers, since mutual owning references
225/// would never allow either [`Arc`] to be dropped. For example, a tree could
226/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
227/// pointers from children back to their parents.
5bcae85e 228///
cc61c64b 229/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
c30ab7b3 230///
cc61c64b
XL
231/// [`Arc`]: struct.Arc.html
232/// [`Arc::downgrade`]: struct.Arc.html#method.downgrade
233/// [`upgrade`]: struct.Weak.html#method.upgrade
234/// [`Option`]: ../../std/option/enum.Option.html
235/// [`None`]: ../../std/option/enum.Option.html#variant.None
e9174d1e 236#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 237pub struct Weak<T: ?Sized> {
2c00a5a8 238 ptr: NonNull<ArcInner<T>>,
1a4d82fc
JJ
239}
240
7453a54e 241#[stable(feature = "arc_weak", since = "1.4.0")]
92a42be0 242unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
7453a54e 243#[stable(feature = "arc_weak", since = "1.4.0")]
92a42be0 244unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
1a4d82fc 245
92a42be0 246#[unstable(feature = "coerce_unsized", issue = "27732")]
c1a9b12d
SL
247impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
248
7453a54e 249#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 250impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
c34b1796
AL
251 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
252 write!(f, "(Weak)")
253 }
254}
255
62682a34 256struct ArcInner<T: ?Sized> {
85aaf69f 257 strong: atomic::AtomicUsize,
c1a9b12d
SL
258
259 // the value usize::MAX acts as a sentinel for temporarily "locking" the
260 // ability to upgrade weak pointers or downgrade strong ones; this is used
e9174d1e 261 // to avoid races in `make_mut` and `get_mut`.
85aaf69f 262 weak: atomic::AtomicUsize,
c1a9b12d 263
1a4d82fc
JJ
264 data: T,
265}
266
62682a34
SL
267unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
268unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
1a4d82fc
JJ
269
270impl<T> Arc<T> {
271 /// Constructs a new `Arc<T>`.
272 ///
273 /// # Examples
274 ///
275 /// ```
276 /// use std::sync::Arc;
277 ///
85aaf69f 278 /// let five = Arc::new(5);
1a4d82fc
JJ
279 /// ```
280 #[inline]
85aaf69f 281 #[stable(feature = "rust1", since = "1.0.0")]
1a4d82fc
JJ
282 pub fn new(data: T) -> Arc<T> {
283 // Start the weak pointer count as 1 which is the weak pointer that's
284 // held by all the strong pointers (kinda), see std/rc.rs for more info
c34b1796 285 let x: Box<_> = box ArcInner {
85aaf69f
SL
286 strong: atomic::AtomicUsize::new(1),
287 weak: atomic::AtomicUsize::new(1),
3b2f2976 288 data,
1a4d82fc 289 };
2c00a5a8 290 Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData }
e9174d1e
SL
291 }
292
c30ab7b3 293 /// Returns the contained value, if the `Arc` has exactly one strong reference.
e9174d1e 294 ///
c30ab7b3
SL
295 /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
296 /// passed in.
e9174d1e 297 ///
54a0048b
SL
298 /// This will succeed even if there are outstanding weak references.
299 ///
c30ab7b3
SL
300 /// [result]: ../../std/result/enum.Result.html
301 ///
e9174d1e
SL
302 /// # Examples
303 ///
304 /// ```
305 /// use std::sync::Arc;
306 ///
307 /// let x = Arc::new(3);
308 /// assert_eq!(Arc::try_unwrap(x), Ok(3));
309 ///
310 /// let x = Arc::new(4);
7cac9316 311 /// let _y = Arc::clone(&x);
c30ab7b3 312 /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
e9174d1e
SL
313 /// ```
314 #[inline]
315 #[stable(feature = "arc_unique", since = "1.4.0")]
316 pub fn try_unwrap(this: Self) -> Result<T, Self> {
317 // See `drop` for why all these atomics are like this
54a0048b 318 if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
92a42be0 319 return Err(this);
b039eaaf 320 }
e9174d1e
SL
321
322 atomic::fence(Acquire);
323
324 unsafe {
7cac9316 325 let elem = ptr::read(&this.ptr.as_ref().data);
e9174d1e
SL
326
327 // Make a weak pointer to clean up the implicit strong-weak reference
54a0048b 328 let _weak = Weak { ptr: this.ptr };
e9174d1e
SL
329 mem::forget(this);
330
331 Ok(elem)
332 }
1a4d82fc 333 }
ea8adc8c 334}
476ff2be 335
ea8adc8c 336impl<T: ?Sized> Arc<T> {
476ff2be
SL
337 /// Consumes the `Arc`, returning the wrapped pointer.
338 ///
339 /// To avoid a memory leak the pointer must be converted back to an `Arc` using
340 /// [`Arc::from_raw`][from_raw].
341 ///
342 /// [from_raw]: struct.Arc.html#method.from_raw
343 ///
344 /// # Examples
345 ///
346 /// ```
476ff2be
SL
347 /// use std::sync::Arc;
348 ///
349 /// let x = Arc::new(10);
350 /// let x_ptr = Arc::into_raw(x);
351 /// assert_eq!(unsafe { *x_ptr }, 10);
352 /// ```
8bb4bdeb
XL
353 #[stable(feature = "rc_raw", since = "1.17.0")]
354 pub fn into_raw(this: Self) -> *const T {
7cac9316 355 let ptr: *const T = &*this;
476ff2be
SL
356 mem::forget(this);
357 ptr
358 }
359
360 /// Constructs an `Arc` from a raw pointer.
361 ///
362 /// The raw pointer must have been previously returned by a call to a
363 /// [`Arc::into_raw`][into_raw].
364 ///
365 /// This function is unsafe because improper use may lead to memory problems. For example, a
366 /// double-free may occur if the function is called twice on the same raw pointer.
367 ///
368 /// [into_raw]: struct.Arc.html#method.into_raw
369 ///
370 /// # Examples
371 ///
372 /// ```
476ff2be
SL
373 /// use std::sync::Arc;
374 ///
375 /// let x = Arc::new(10);
376 /// let x_ptr = Arc::into_raw(x);
377 ///
378 /// unsafe {
379 /// // Convert back to an `Arc` to prevent leak.
380 /// let x = Arc::from_raw(x_ptr);
381 /// assert_eq!(*x, 10);
382 ///
383 /// // Further calls to `Arc::from_raw(x_ptr)` would be memory unsafe.
384 /// }
385 ///
386 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
387 /// ```
8bb4bdeb
XL
388 #[stable(feature = "rc_raw", since = "1.17.0")]
389 pub unsafe fn from_raw(ptr: *const T) -> Self {
ea8adc8c
XL
390 // Align the unsized value to the end of the ArcInner.
391 // Because it is ?Sized, it will always be the last field in memory.
392 let align = align_of_val(&*ptr);
393 let layout = Layout::new::<ArcInner<()>>();
394 let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
395
396 // Reverse the offset to find the original ArcInner.
397 let fake_ptr = ptr as *mut ArcInner<T>;
398 let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
399
8bb4bdeb 400 Arc {
2c00a5a8 401 ptr: NonNull::new_unchecked(arc_ptr),
ff7c6d11 402 phantom: PhantomData,
8bb4bdeb 403 }
476ff2be 404 }
1a4d82fc 405
c30ab7b3
SL
406 /// Creates a new [`Weak`][weak] pointer to this value.
407 ///
408 /// [weak]: struct.Weak.html
1a4d82fc
JJ
409 ///
410 /// # Examples
411 ///
412 /// ```
413 /// use std::sync::Arc;
414 ///
85aaf69f 415 /// let five = Arc::new(5);
1a4d82fc 416 ///
e9174d1e 417 /// let weak_five = Arc::downgrade(&five);
1a4d82fc 418 /// ```
e9174d1e
SL
419 #[stable(feature = "arc_weak", since = "1.4.0")]
420 pub fn downgrade(this: &Self) -> Weak<T> {
54a0048b
SL
421 // This Relaxed is OK because we're checking the value in the CAS
422 // below.
423 let mut cur = this.inner().weak.load(Relaxed);
c1a9b12d 424
54a0048b 425 loop {
c1a9b12d 426 // check if the weak counter is currently "locked"; if so, spin.
b039eaaf 427 if cur == usize::MAX {
54a0048b 428 cur = this.inner().weak.load(Relaxed);
92a42be0 429 continue;
b039eaaf 430 }
c1a9b12d
SL
431
432 // NOTE: this code currently ignores the possibility of overflow
433 // into usize::MAX; in general both Rc and Arc need to be adjusted
434 // to deal with overflow.
435
436 // Unlike with Clone(), we need this to be an Acquire read to
437 // synchronize with the write coming from `is_unique`, so that the
438 // events prior to that write happen before this read.
54a0048b
SL
439 match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
440 Ok(_) => return Weak { ptr: this.ptr },
441 Err(old) => cur = old,
c1a9b12d
SL
442 }
443 }
1a4d82fc 444 }
1a4d82fc 445
c30ab7b3
SL
446 /// Gets the number of [`Weak`][weak] pointers to this value.
447 ///
c30ab7b3
SL
448 /// [weak]: struct.Weak.html
449 ///
476ff2be
SL
450 /// # Safety
451 ///
452 /// This method by itself is safe, but using it correctly requires extra care.
453 /// Another thread can change the weak count at any time,
454 /// including potentially between calling this method and acting on the result.
455 ///
c30ab7b3
SL
456 /// # Examples
457 ///
458 /// ```
c30ab7b3
SL
459 /// use std::sync::Arc;
460 ///
461 /// let five = Arc::new(5);
462 /// let _weak_five = Arc::downgrade(&five);
463 ///
464 /// // This assertion is deterministic because we haven't shared
465 /// // the `Arc` or `Weak` between threads.
466 /// assert_eq!(1, Arc::weak_count(&five));
467 /// ```
62682a34 468 #[inline]
476ff2be 469 #[stable(feature = "arc_counts", since = "1.15.0")]
e9174d1e 470 pub fn weak_count(this: &Self) -> usize {
3b2f2976
XL
471 let cnt = this.inner().weak.load(SeqCst);
472 // If the weak count is currently locked, the value of the
473 // count was 0 just before taking the lock.
474 if cnt == usize::MAX { 0 } else { cnt - 1 }
62682a34
SL
475 }
476
c30ab7b3
SL
477 /// Gets the number of strong (`Arc`) pointers to this value.
478 ///
476ff2be
SL
479 /// # Safety
480 ///
481 /// This method by itself is safe, but using it correctly requires extra care.
482 /// Another thread can change the strong count at any time,
483 /// including potentially between calling this method and acting on the result.
c30ab7b3
SL
484 ///
485 /// # Examples
486 ///
487 /// ```
c30ab7b3
SL
488 /// use std::sync::Arc;
489 ///
490 /// let five = Arc::new(5);
7cac9316 491 /// let _also_five = Arc::clone(&five);
c30ab7b3
SL
492 ///
493 /// // This assertion is deterministic because we haven't shared
494 /// // the `Arc` between threads.
495 /// assert_eq!(2, Arc::strong_count(&five));
496 /// ```
62682a34 497 #[inline]
476ff2be 498 #[stable(feature = "arc_counts", since = "1.15.0")]
e9174d1e 499 pub fn strong_count(this: &Self) -> usize {
62682a34
SL
500 this.inner().strong.load(SeqCst)
501 }
502
1a4d82fc
JJ
503 #[inline]
504 fn inner(&self) -> &ArcInner<T> {
c34b1796
AL
505 // This unsafety is ok because while this arc is alive we're guaranteed
506 // that the inner pointer is valid. Furthermore, we know that the
507 // `ArcInner` structure itself is `Sync` because the inner data is
508 // `Sync` as well, so we're ok loaning out an immutable pointer to these
509 // contents.
7cac9316 510 unsafe { self.ptr.as_ref() }
1a4d82fc 511 }
c34b1796
AL
512
513 // Non-inlined part of `drop`.
514 #[inline(never)]
515 unsafe fn drop_slow(&mut self) {
c34b1796
AL
516 // Destroy the data at this time, even though we may not free the box
517 // allocation itself (there may still be weak pointers lying around).
7cac9316 518 ptr::drop_in_place(&mut self.ptr.as_mut().data);
c34b1796
AL
519
520 if self.inner().weak.fetch_sub(1, Release) == 1 {
521 atomic::fence(Acquire);
94b46f34 522 Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
c34b1796
AL
523 }
524 }
9e0c209e
SL
525
526 #[inline]
8bb4bdeb 527 #[stable(feature = "ptr_eq", since = "1.17.0")]
c30ab7b3
SL
528 /// Returns true if the two `Arc`s point to the same value (not
529 /// just values that compare as equal).
9e0c209e
SL
530 ///
531 /// # Examples
532 ///
533 /// ```
9e0c209e
SL
534 /// use std::sync::Arc;
535 ///
536 /// let five = Arc::new(5);
7cac9316 537 /// let same_five = Arc::clone(&five);
9e0c209e
SL
538 /// let other_five = Arc::new(5);
539 ///
540 /// assert!(Arc::ptr_eq(&five, &same_five));
541 /// assert!(!Arc::ptr_eq(&five, &other_five));
542 /// ```
543 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
7cac9316 544 this.ptr.as_ptr() == other.ptr.as_ptr()
9e0c209e 545 }
1a4d82fc
JJ
546}
547
3b2f2976
XL
548impl<T: ?Sized> Arc<T> {
549 // Allocates an `ArcInner<T>` with sufficient space for an unsized value
550 unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
551 // Create a fake ArcInner to find allocation size and alignment
552 let fake_ptr = ptr as *mut ArcInner<T>;
553
554 let layout = Layout::for_value(&*fake_ptr);
555
83c7162d 556 let mem = Global.alloc(layout)
94b46f34 557 .unwrap_or_else(|_| handle_alloc_error(layout));
3b2f2976
XL
558
559 // Initialize the real ArcInner
83c7162d 560 let inner = set_data_ptr(ptr as *mut T, mem.as_ptr() as *mut u8) as *mut ArcInner<T>;
3b2f2976
XL
561
562 ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
563 ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
564
565 inner
566 }
567
568 fn from_box(v: Box<T>) -> Arc<T> {
569 unsafe {
83c7162d
XL
570 let box_unique = Box::into_unique(v);
571 let bptr = box_unique.as_ptr();
3b2f2976
XL
572
573 let value_size = size_of_val(&*bptr);
574 let ptr = Self::allocate_for_ptr(bptr);
575
576 // Copy value as bytes
577 ptr::copy_nonoverlapping(
578 bptr as *const T as *const u8,
579 &mut (*ptr).data as *mut _ as *mut u8,
580 value_size);
581
582 // Free the allocation without dropping its contents
83c7162d 583 box_free(box_unique);
3b2f2976 584
2c00a5a8 585 Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
3b2f2976
XL
586 }
587 }
588}
589
590// Sets the data pointer of a `?Sized` raw pointer.
591//
592// For a slice/trait object, this sets the `data` field and leaves the rest
593// unchanged. For a sized raw pointer, this simply sets the pointer.
594unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
595 ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
596 ptr
597}
598
599impl<T> Arc<[T]> {
600 // Copy elements from slice into newly allocated Arc<[T]>
601 //
602 // Unsafe because the caller must either take ownership or bind `T: Copy`
603 unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
604 let v_ptr = v as *const [T];
605 let ptr = Self::allocate_for_ptr(v_ptr);
606
607 ptr::copy_nonoverlapping(
608 v.as_ptr(),
609 &mut (*ptr).data as *mut [T] as *mut T,
610 v.len());
611
2c00a5a8 612 Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
3b2f2976
XL
613 }
614}
615
616// Specialization trait used for From<&[T]>
617trait ArcFromSlice<T> {
618 fn from_slice(slice: &[T]) -> Self;
619}
620
621impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
622 #[inline]
623 default fn from_slice(v: &[T]) -> Self {
624 // Panic guard while cloning T elements.
625 // In the event of a panic, elements that have been written
626 // into the new ArcInner will be dropped, then the memory freed.
627 struct Guard<T> {
83c7162d 628 mem: NonNull<u8>,
3b2f2976
XL
629 elems: *mut T,
630 layout: Layout,
631 n_elems: usize,
632 }
633
634 impl<T> Drop for Guard<T> {
635 fn drop(&mut self) {
636 use core::slice::from_raw_parts_mut;
637
638 unsafe {
639 let slice = from_raw_parts_mut(self.elems, self.n_elems);
640 ptr::drop_in_place(slice);
641
94b46f34 642 Global.dealloc(self.mem.cast(), self.layout.clone());
3b2f2976
XL
643 }
644 }
645 }
646
647 unsafe {
648 let v_ptr = v as *const [T];
649 let ptr = Self::allocate_for_ptr(v_ptr);
650
651 let mem = ptr as *mut _ as *mut u8;
652 let layout = Layout::for_value(&*ptr);
653
654 // Pointer to first element
655 let elems = &mut (*ptr).data as *mut [T] as *mut T;
656
657 let mut guard = Guard{
83c7162d 658 mem: NonNull::new_unchecked(mem),
3b2f2976
XL
659 elems: elems,
660 layout: layout,
661 n_elems: 0,
662 };
663
664 for (i, item) in v.iter().enumerate() {
665 ptr::write(elems.offset(i as isize), item.clone());
666 guard.n_elems += 1;
667 }
668
669 // All clear. Forget the guard so it doesn't free the new ArcInner.
670 mem::forget(guard);
671
2c00a5a8 672 Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
3b2f2976
XL
673 }
674 }
675}
676
677impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
678 #[inline]
679 fn from_slice(v: &[T]) -> Self {
680 unsafe { Arc::copy_from_slice(v) }
681 }
682}
683
85aaf69f 684#[stable(feature = "rust1", since = "1.0.0")]
62682a34 685impl<T: ?Sized> Clone for Arc<T> {
c30ab7b3 686 /// Makes a clone of the `Arc` pointer.
1a4d82fc 687 ///
c30ab7b3
SL
688 /// This creates another pointer to the same inner value, increasing the
689 /// strong reference count.
1a4d82fc
JJ
690 ///
691 /// # Examples
692 ///
693 /// ```
694 /// use std::sync::Arc;
695 ///
85aaf69f 696 /// let five = Arc::new(5);
1a4d82fc 697 ///
7cac9316 698 /// Arc::clone(&five);
1a4d82fc
JJ
699 /// ```
700 #[inline]
701 fn clone(&self) -> Arc<T> {
c34b1796
AL
702 // Using a relaxed ordering is alright here, as knowledge of the
703 // original reference prevents other threads from erroneously deleting
704 // the object.
1a4d82fc 705 //
c34b1796
AL
706 // As explained in the [Boost documentation][1], Increasing the
707 // reference counter can always be done with memory_order_relaxed: New
708 // references to an object can only be formed from an existing
709 // reference, and passing an existing reference from one thread to
710 // another must already provide any required synchronization.
1a4d82fc
JJ
711 //
712 // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
c1a9b12d
SL
713 let old_size = self.inner().strong.fetch_add(1, Relaxed);
714
715 // However we need to guard against massive refcounts in case someone
716 // is `mem::forget`ing Arcs. If we don't do this the count can overflow
717 // and users will use-after free. We racily saturate to `isize::MAX` on
718 // the assumption that there aren't ~2 billion threads incrementing
719 // the reference count at once. This branch will never be taken in
720 // any realistic program.
721 //
722 // We abort because such a program is incredibly degenerate, and we
723 // don't care to support it.
724 if old_size > MAX_REFCOUNT {
b039eaaf
SL
725 unsafe {
726 abort();
727 }
c1a9b12d
SL
728 }
729
ff7c6d11 730 Arc { ptr: self.ptr, phantom: PhantomData }
1a4d82fc
JJ
731 }
732}
733
85aaf69f 734#[stable(feature = "rust1", since = "1.0.0")]
62682a34 735impl<T: ?Sized> Deref for Arc<T> {
1a4d82fc
JJ
736 type Target = T;
737
738 #[inline]
739 fn deref(&self) -> &T {
740 &self.inner().data
741 }
742}
743
c34b1796 744impl<T: Clone> Arc<T> {
c30ab7b3
SL
745 /// Makes a mutable reference into the given `Arc`.
746 ///
747 /// If there are other `Arc` or [`Weak`][weak] pointers to the same value,
748 /// then `make_mut` will invoke [`clone`][clone] on the inner value to
749 /// ensure unique ownership. This is also referred to as clone-on-write.
1a4d82fc 750 ///
c30ab7b3
SL
751 /// See also [`get_mut`][get_mut], which will fail rather than cloning.
752 ///
753 /// [weak]: struct.Weak.html
754 /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
755 /// [get_mut]: struct.Arc.html#method.get_mut
62682a34 756 ///
1a4d82fc
JJ
757 /// # Examples
758 ///
759 /// ```
760 /// use std::sync::Arc;
761 ///
e9174d1e
SL
762 /// let mut data = Arc::new(5);
763 ///
764 /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
7cac9316 765 /// let mut other_data = Arc::clone(&data); // Won't clone inner data
e9174d1e
SL
766 /// *Arc::make_mut(&mut data) += 1; // Clones inner data
767 /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
768 /// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything
769 ///
c30ab7b3 770 /// // Now `data` and `other_data` point to different values.
e9174d1e
SL
771 /// assert_eq!(*data, 8);
772 /// assert_eq!(*other_data, 12);
1a4d82fc
JJ
773 /// ```
774 #[inline]
e9174d1e
SL
775 #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        // (Clone-on-write: return a unique mutable reference, cloning the
        // inner value only if other strong pointers exist.)
        //
        // Note that we hold both a strong reference and a weak reference.
        // Thus, releasing our strong reference only will not, by itself, cause
        // the memory to be deallocated.
        //
        // Use Acquire to ensure that we see any writes to `weak` that happen
        // before release writes (i.e., decrements) to `strong`. Since we hold a
        // weak count, there's no chance the ArcInner itself could be
        // deallocated.
        //
        // A successful CAS of `strong` from 1 to 0 temporarily "kills" the
        // strong count, which also prevents any concurrent `Weak::upgrade`
        // from succeeding while we decide what to do.
        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
            // Another strong pointer exists; clone
            *this = Arc::new((**this).clone());
        } else if this.inner().weak.load(Relaxed) != 1 {
            // Relaxed suffices in the above because this is fundamentally an
            // optimization: we are always racing with weak pointers being
            // dropped. Worst case, we end up allocated a new Arc unnecessarily.

            // We removed the last strong ref, but there are additional weak
            // refs remaining. We'll move the contents to a new Arc, and
            // invalidate the other weak refs.

            // Note that it is not possible for the read of `weak` to yield
            // usize::MAX (i.e., locked), since the weak count can only be
            // locked by a thread with a strong reference.

            // Materialize our own implicit weak pointer, so that it can clean
            // up the ArcInner as needed.
            let weak = Weak { ptr: this.ptr };

            // mark the data itself as already deallocated
            unsafe {
                // there is no data race in the implicit write caused by `read`
                // here (due to zeroing) because data is no longer accessed by
                // other threads (due to there being no more strong refs at this
                // point).
                let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
                // Swap the freshly allocated Arc into `this`; forgetting `swap`
                // (now the old handle) leaves `weak`'s Drop responsible for
                // freeing the old ArcInner allocation.
                mem::swap(this, &mut swap);
                mem::forget(swap);
            }
        } else {
            // We were the sole reference of either kind; bump back up the
            // strong ref count.
            this.inner().strong.store(1, Release);
        }

        // As with `get_mut()`, the unsafety is ok because our reference was
        // either unique to begin with, or became one upon cloning the contents.
        unsafe {
            &mut this.ptr.as_mut().data
        }
    }
827}
828
impl<T: ?Sized> Arc<T> {
    /// Returns a mutable reference to the inner value, if there are
    /// no other `Arc` or [`Weak`][weak] pointers to the same value.
    ///
    /// Returns [`None`][option] otherwise, because it is not safe to
    /// mutate a shared value.
    ///
    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
    /// the inner value when it's shared.
    ///
    /// [weak]: struct.Weak.html
    /// [option]: ../../std/option/enum.Option.html
    /// [make_mut]: struct.Arc.html#method.make_mut
    /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut x = Arc::new(3);
    /// *Arc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = Arc::clone(&x);
    /// assert!(Arc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if this.is_unique() {
            // This unsafety is ok because we're guaranteed that the pointer
            // returned is the *only* pointer that will ever be returned to T. Our
            // reference count is guaranteed to be 1 at this point, and we required
            // the Arc itself to be `mut`, so we're returning the only possible
            // reference to the inner data.
            unsafe {
                Some(&mut this.ptr.as_mut().data)
            }
        } else {
            None
        }
    }

    /// Determine whether this is the unique reference (including weak refs) to
    /// the underlying data.
    ///
    /// Note that this requires locking the weak ref count.
    fn is_unique(&mut self) -> bool {
        // lock the weak pointer count if we appear to be the sole weak pointer
        // holder. `usize::MAX` is the sentinel for a locked weak count.
        //
        // The acquire label here ensures a happens-before relationship with any
        // writes to `strong` prior to decrements of the `weak` count (via drop,
        // which uses Release).
        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
            // Due to the previous acquire read, this will observe any writes to
            // `strong` that were due to upgrading weak pointers; only strong
            // clones remain, which require that the strong count is > 1 anyway.
            let unique = self.inner().strong.load(Relaxed) == 1;

            // The release write here synchronizes with a read in `downgrade`,
            // effectively preventing the above read of `strong` from happening
            // after the write.
            self.inner().weak.store(1, Release); // release the lock
            unique
        } else {
            false
        }
    }
}
900
#[stable(feature = "rust1", since = "1.0.0")]
// NOTE: `#[may_dangle]` (the "dropck eyepatch") tells the drop checker that
// this destructor does not access borrowed data of type `T` other than to
// drop it, which is what makes the attribute sound to assert here.
unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
    /// Drops the `Arc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`][weak], so we `drop` the inner value.
    ///
    /// [weak]: struct.Weak.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::new(Foo);
    /// let foo2 = Arc::clone(&foo);
    ///
    /// drop(foo); // Doesn't print anything
    /// drop(foo2); // Prints "dropped!"
    /// ```
    #[inline]
    fn drop(&mut self) {
        // Because `fetch_sub` is already atomic, we do not need to synchronize
        // with other threads unless we are going to delete the object. This
        // same logic applies to the below `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 {
            return;
        }

        // This fence is needed to prevent reordering of use of the data and
        // deletion of the data. Because it is marked `Release`, the decreasing
        // of the reference count synchronizes with this `Acquire` fence. This
        // means that use of the data happens before decreasing the reference
        // count, which happens before this fence, which happens before the
        // deletion of the data.
        //
        // As explained in the [Boost documentation][1],
        //
        // > It is important to enforce any possible access to the object in one
        // > thread (through an existing reference) to *happen before* deleting
        // > the object in a different thread. This is achieved by a "release"
        // > operation after dropping a reference (any access to the object
        // > through this reference must obviously happened before), and an
        // > "acquire" operation before deleting the object.
        //
        // In particular, while the contents of an Arc are usually immutable, it's
        // possible to have interior writes to something like a Mutex<T>. Since a
        // Mutex is not acquired when it is deleted, we can't rely on its
        // synchronization logic to make writes in thread A visible to a destructor
        // running in thread B.
        //
        // Also note that the Acquire fence here could probably be replaced with an
        // Acquire load, which could improve performance in highly-contended
        // situations. See [2].
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        // [2]: (https://github.com/rust-lang/rust/pull/41714)
        atomic::fence(Acquire);

        // We were the last strong reference: run the slow path, which drops
        // the inner value and releases the implicit weak reference.
        unsafe {
            self.drop_slow();
        }
    }
}
974
94b46f34
XL
impl Arc<Any + Send + Sync> {
    #[inline]
    #[unstable(feature = "rc_downcast", issue = "44608")]
    /// Attempt to downcast the `Arc<Any + Send + Sync>` to a concrete type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(rc_downcast)]
    /// use std::any::Any;
    /// use std::sync::Arc;
    ///
    /// fn print_if_string(value: Arc<Any + Send + Sync>) {
    ///     if let Ok(string) = value.downcast::<String>() {
    ///         println!("String ({}): {}", string.len(), string);
    ///     }
    /// }
    ///
    /// fn main() {
    ///     let my_string = "Hello World".to_string();
    ///     print_if_string(Arc::new(my_string));
    ///     print_if_string(Arc::new(0i8));
    /// }
    /// ```
    pub fn downcast<T>(self) -> Result<Arc<T>, Self>
    where
        T: Any + Send + Sync + 'static,
    {
        if (*self).is::<T>() {
            // The `is::<T>()` check above guarantees the payload really is a
            // `T`, so reinterpreting the allocation as `ArcInner<T>` is sound.
            let ptr = self.ptr.cast::<ArcInner<T>>();
            // Forget `self` before constructing the new handle so the strong
            // count is transferred (not decremented) on this path.
            mem::forget(self);
            Ok(Arc { ptr, phantom: PhantomData })
        } else {
            Err(self)
        }
    }
}
1012
impl<T> Weak<T> {
    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
    /// it. Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: struct.Weak.html#method.upgrade
    /// [`None`]: ../../std/option/enum.Option.html#variant.None
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// assert!(empty.upgrade().is_none());
    /// ```
    #[stable(feature = "downgraded_weak", since = "1.10.0")]
    pub fn new() -> Weak<T> {
        unsafe {
            Weak {
                // strong == 0 means `upgrade` can never succeed, so the
                // uninitialized `data` field is never read; weak == 1 is this
                // handle's own reference, so dropping it frees the allocation.
                // NOTE(review): `uninitialized()` is the pre-`MaybeUninit`
                // idiom and the payload must be treated as garbage — confirm
                // no path ever reads `data` while strong == 0.
                ptr: Box::into_raw_non_null(box ArcInner {
                    strong: atomic::AtomicUsize::new(0),
                    weak: atomic::AtomicUsize::new(1),
                    data: uninitialized(),
                }),
            }
        }
    }
}
1041
impl<T: ?Sized> Weak<T> {
    /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending
    /// the lifetime of the value if successful.
    ///
    /// Returns [`None`] if the value has since been dropped.
    ///
    /// [`Arc`]: struct.Arc.html
    /// [`None`]: ../../std/option/enum.Option.html#variant.None
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let weak_five = Arc::downgrade(&five);
    ///
    /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
    /// assert!(strong_five.is_some());
    ///
    /// // Destroy all strong pointers.
    /// drop(strong_five);
    /// drop(five);
    ///
    /// assert!(weak_five.upgrade().is_none());
    /// ```
    #[stable(feature = "arc_weak", since = "1.4.0")]
    pub fn upgrade(&self) -> Option<Arc<T>> {
        // We use a CAS loop to increment the strong count instead of a
        // fetch_add because once the count hits 0 it must never be above 0.
        let inner = self.inner();

        // Relaxed load because any write of 0 that we can observe
        // leaves the field in a permanently zero state (so a
        // "stale" read of 0 is fine), and any other value is
        // confirmed via the CAS below.
        let mut n = inner.strong.load(Relaxed);

        loop {
            if n == 0 {
                // The value has already been dropped; no resurrection.
                return None;
            }

            // See comments in `Arc::clone` for why we do this (for `mem::forget`).
            if n > MAX_REFCOUNT {
                unsafe {
                    abort();
                }
            }

            // Relaxed is valid for the same reason it is on Arc's Clone impl
            match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
                Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
                Err(old) => n = old,
            }
        }
    }

    /// Dereferences to the shared `ArcInner`.
    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // See comments above for why this is "safe": a live Weak keeps the
        // ArcInner allocation (though not the value) alive.
        unsafe { self.ptr.as_ref() }
    }
}
1107
e9174d1e 1108#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 1109impl<T: ?Sized> Clone for Weak<T> {
cc61c64b 1110 /// Makes a clone of the `Weak` pointer that points to the same value.
1a4d82fc
JJ
1111 ///
1112 /// # Examples
1113 ///
1114 /// ```
7cac9316 1115 /// use std::sync::{Arc, Weak};
1a4d82fc 1116 ///
e9174d1e 1117 /// let weak_five = Arc::downgrade(&Arc::new(5));
1a4d82fc 1118 ///
7cac9316 1119 /// Weak::clone(&weak_five);
1a4d82fc
JJ
1120 /// ```
1121 #[inline]
1122 fn clone(&self) -> Weak<T> {
c1a9b12d
SL
1123 // See comments in Arc::clone() for why this is relaxed. This can use a
1124 // fetch_add (ignoring the lock) because the weak count is only locked
1125 // where are *no other* weak pointers in existence. (So we can't be
1126 // running this code in that case).
1127 let old_size = self.inner().weak.fetch_add(1, Relaxed);
1128
1129 // See comments in Arc::clone() for why we do this (for mem::forget).
1130 if old_size > MAX_REFCOUNT {
b039eaaf
SL
1131 unsafe {
1132 abort();
1133 }
c1a9b12d
SL
1134 }
1135
54a0048b 1136 return Weak { ptr: self.ptr };
1a4d82fc
JJ
1137 }
1138}
1139
a7813a04
XL
1140#[stable(feature = "downgraded_weak", since = "1.10.0")]
1141impl<T> Default for Weak<T> {
cc61c64b
XL
1142 /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
1143 /// it. Calling [`upgrade`] on the return value always gives [`None`].
c30ab7b3 1144 ///
cc61c64b
XL
1145 /// [`upgrade`]: struct.Weak.html#method.upgrade
1146 /// [`None`]: ../../std/option/enum.Option.html#variant.None
c30ab7b3
SL
1147 ///
1148 /// # Examples
1149 ///
1150 /// ```
1151 /// use std::sync::Weak;
1152 ///
1153 /// let empty: Weak<i64> = Default::default();
1154 /// assert!(empty.upgrade().is_none());
1155 /// ```
a7813a04
XL
1156 fn default() -> Weak<T> {
1157 Weak::new()
1158 }
1159}
1160
#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::{Arc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Arc::new(Foo);
    /// let weak_foo = Arc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo); // Doesn't print anything
    /// drop(foo); // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // If we find out that we were the last weak pointer, then its time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
        // the memory orderings
        //
        // It's not necessary to check for the locked state here, because the
        // weak count can only be locked if there was precisely one weak ref,
        // meaning that drop could only subsequently run ON that remaining weak
        // ref, which can only happen after the lock is released.
        if self.inner().weak.fetch_sub(1, Release) == 1 {
            atomic::fence(Acquire);
            // Only the ArcInner allocation is freed here; by the time the last
            // weak reference goes away, the inner value itself has already
            // been dropped by the last strong reference (see `drop_slow`).
            unsafe {
                Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
            }
        }
    }
}
1204
85aaf69f 1205#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1206impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
c30ab7b3 1207 /// Equality for two `Arc`s.
1a4d82fc 1208 ///
c30ab7b3 1209 /// Two `Arc`s are equal if their inner values are equal.
1a4d82fc
JJ
1210 ///
1211 /// # Examples
1212 ///
1213 /// ```
1214 /// use std::sync::Arc;
1215 ///
85aaf69f 1216 /// let five = Arc::new(5);
1a4d82fc 1217 ///
c30ab7b3 1218 /// assert!(five == Arc::new(5));
1a4d82fc 1219 /// ```
b039eaaf
SL
1220 fn eq(&self, other: &Arc<T>) -> bool {
1221 *(*self) == *(*other)
1222 }
1a4d82fc 1223
c30ab7b3 1224 /// Inequality for two `Arc`s.
1a4d82fc 1225 ///
c30ab7b3 1226 /// Two `Arc`s are unequal if their inner values are unequal.
1a4d82fc
JJ
1227 ///
1228 /// # Examples
1229 ///
1230 /// ```
1231 /// use std::sync::Arc;
1232 ///
85aaf69f 1233 /// let five = Arc::new(5);
1a4d82fc 1234 ///
c30ab7b3 1235 /// assert!(five != Arc::new(6));
1a4d82fc 1236 /// ```
b039eaaf
SL
1237 fn ne(&self, other: &Arc<T>) -> bool {
1238 *(*self) != *(*other)
1239 }
1a4d82fc 1240}
85aaf69f 1241#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1242impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
c30ab7b3 1243 /// Partial comparison for two `Arc`s.
1a4d82fc
JJ
1244 ///
1245 /// The two are compared by calling `partial_cmp()` on their inner values.
1246 ///
1247 /// # Examples
1248 ///
1249 /// ```
1250 /// use std::sync::Arc;
c30ab7b3 1251 /// use std::cmp::Ordering;
1a4d82fc 1252 ///
85aaf69f 1253 /// let five = Arc::new(5);
1a4d82fc 1254 ///
c30ab7b3 1255 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
1a4d82fc
JJ
1256 /// ```
1257 fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
1258 (**self).partial_cmp(&**other)
1259 }
1260
c30ab7b3 1261 /// Less-than comparison for two `Arc`s.
1a4d82fc
JJ
1262 ///
1263 /// The two are compared by calling `<` on their inner values.
1264 ///
1265 /// # Examples
1266 ///
1267 /// ```
1268 /// use std::sync::Arc;
1269 ///
85aaf69f 1270 /// let five = Arc::new(5);
1a4d82fc 1271 ///
c30ab7b3 1272 /// assert!(five < Arc::new(6));
1a4d82fc 1273 /// ```
b039eaaf
SL
1274 fn lt(&self, other: &Arc<T>) -> bool {
1275 *(*self) < *(*other)
1276 }
1a4d82fc 1277
c30ab7b3 1278 /// 'Less than or equal to' comparison for two `Arc`s.
1a4d82fc
JJ
1279 ///
1280 /// The two are compared by calling `<=` on their inner values.
1281 ///
1282 /// # Examples
1283 ///
1284 /// ```
1285 /// use std::sync::Arc;
1286 ///
85aaf69f 1287 /// let five = Arc::new(5);
1a4d82fc 1288 ///
c30ab7b3 1289 /// assert!(five <= Arc::new(5));
1a4d82fc 1290 /// ```
b039eaaf
SL
1291 fn le(&self, other: &Arc<T>) -> bool {
1292 *(*self) <= *(*other)
1293 }
1a4d82fc 1294
c30ab7b3 1295 /// Greater-than comparison for two `Arc`s.
1a4d82fc
JJ
1296 ///
1297 /// The two are compared by calling `>` on their inner values.
1298 ///
1299 /// # Examples
1300 ///
1301 /// ```
1302 /// use std::sync::Arc;
1303 ///
85aaf69f 1304 /// let five = Arc::new(5);
1a4d82fc 1305 ///
c30ab7b3 1306 /// assert!(five > Arc::new(4));
1a4d82fc 1307 /// ```
b039eaaf
SL
1308 fn gt(&self, other: &Arc<T>) -> bool {
1309 *(*self) > *(*other)
1310 }
1a4d82fc 1311
c30ab7b3 1312 /// 'Greater than or equal to' comparison for two `Arc`s.
1a4d82fc
JJ
1313 ///
1314 /// The two are compared by calling `>=` on their inner values.
1315 ///
1316 /// # Examples
1317 ///
1318 /// ```
1319 /// use std::sync::Arc;
1320 ///
85aaf69f 1321 /// let five = Arc::new(5);
1a4d82fc 1322 ///
c30ab7b3 1323 /// assert!(five >= Arc::new(5));
1a4d82fc 1324 /// ```
b039eaaf
SL
1325 fn ge(&self, other: &Arc<T>) -> bool {
1326 *(*self) >= *(*other)
1327 }
1a4d82fc 1328}
85aaf69f 1329#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1330impl<T: ?Sized + Ord> Ord for Arc<T> {
c30ab7b3
SL
1331 /// Comparison for two `Arc`s.
1332 ///
1333 /// The two are compared by calling `cmp()` on their inner values.
1334 ///
1335 /// # Examples
1336 ///
1337 /// ```
1338 /// use std::sync::Arc;
1339 /// use std::cmp::Ordering;
1340 ///
1341 /// let five = Arc::new(5);
1342 ///
1343 /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
1344 /// ```
b039eaaf
SL
1345 fn cmp(&self, other: &Arc<T>) -> Ordering {
1346 (**self).cmp(&**other)
1347 }
1a4d82fc 1348}
#[stable(feature = "rust1", since = "1.0.0")]
// `Arc<T>`'s equality is a total equivalence relation whenever `T`'s is:
// `PartialEq` above delegates to the inner values, so `Eq` is a pure marker.
impl<T: ?Sized + Eq> Eq for Arc<T> {}
1a4d82fc 1351
85aaf69f 1352#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1353impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
1a4d82fc 1354 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
85aaf69f 1355 fmt::Display::fmt(&**self, f)
1a4d82fc
JJ
1356 }
1357}
1358
85aaf69f 1359#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1360impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
1a4d82fc 1361 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
85aaf69f 1362 fmt::Debug::fmt(&**self, f)
1a4d82fc
JJ
1363 }
1364}
1365
9346a6ac 1366#[stable(feature = "rust1", since = "1.0.0")]
7453a54e 1367impl<T: ?Sized> fmt::Pointer for Arc<T> {
9346a6ac 1368 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
ff7c6d11 1369 fmt::Pointer::fmt(&(&**self as *const T), f)
9346a6ac
AL
1370 }
1371}
1372
85aaf69f 1373#[stable(feature = "rust1", since = "1.0.0")]
d9579d0f 1374impl<T: Default> Default for Arc<T> {
c30ab7b3
SL
1375 /// Creates a new `Arc<T>`, with the `Default` value for `T`.
1376 ///
1377 /// # Examples
1378 ///
1379 /// ```
1380 /// use std::sync::Arc;
1381 ///
1382 /// let x: Arc<i32> = Default::default();
1383 /// assert_eq!(*x, 0);
1384 /// ```
b039eaaf
SL
1385 fn default() -> Arc<T> {
1386 Arc::new(Default::default())
1387 }
1a4d82fc
JJ
1388}
1389
85aaf69f 1390#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1391impl<T: ?Sized + Hash> Hash for Arc<T> {
85aaf69f
SL
1392 fn hash<H: Hasher>(&self, state: &mut H) {
1393 (**self).hash(state)
1394 }
1395}
1a4d82fc 1396
92a42be0
SL
1397#[stable(feature = "from_for_ptrs", since = "1.6.0")]
1398impl<T> From<T> for Arc<T> {
1399 fn from(t: T) -> Self {
1400 Arc::new(t)
1401 }
1402}
1403
3b2f2976
XL
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<'a, T: Clone> From<&'a [T]> for Arc<[T]> {
    /// Allocates a new `Arc<[T]>` holding a copy of every element of `v`.
    #[inline]
    fn from(v: &[T]) -> Arc<[T]> {
        // Delegates to the private `ArcFromSlice` helper defined elsewhere in
        // this file; presumably specialized to memcpy for `T: Copy` — confirm
        // against the trait's definition.
        <Self as ArcFromSlice<T>>::from_slice(v)
    }
}
1411
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<'a> From<&'a str> for Arc<str> {
    /// Allocates a new `Arc<str>` holding a copy of `v`.
    #[inline]
    fn from(v: &str) -> Arc<str> {
        // Build an Arc over the UTF-8 bytes, then reinterpret the fat pointer
        // as `str`. Sound because the bytes came from a valid `&str` and both
        // slice types share the same layout.
        let arc = Arc::<[u8]>::from(v.as_bytes());
        unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
    }
}
1420
1421#[stable(feature = "shared_from_slice", since = "1.21.0")]
1422impl From<String> for Arc<str> {
1423 #[inline]
1424 fn from(v: String) -> Arc<str> {
1425 Arc::from(&v[..])
1426 }
1427}
1428
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized> From<Box<T>> for Arc<T> {
    /// Moves the boxed value into a new `Arc<T>` allocation.
    #[inline]
    fn from(v: Box<T>) -> Arc<T> {
        // Delegates to the private `Arc::from_box` constructor defined
        // elsewhere in this file.
        Arc::from_box(v)
    }
}
1436
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T> From<Vec<T>> for Arc<[T]> {
    /// Moves the vector's elements into a new `Arc<[T]>` allocation.
    #[inline]
    fn from(mut v: Vec<T>) -> Arc<[T]> {
        unsafe {
            // Bitwise-moves the elements into the new allocation; after this
            // the Vec and the Arc momentarily alias ownership of the elements.
            let arc = Arc::copy_from_slice(&v);

            // Allow the Vec to free its memory, but not destroy its contents
            // (setting len to 0 prevents a double-drop of the moved elements).
            v.set_len(0);

            arc
        }
    }
}
1451
#[cfg(test)]
mod tests {
    // Unit tests for `Arc`/`Weak`: reference counting, clone-on-write
    // (`make_mut`), raw-pointer round-trips, `From` conversions, and
    // panic/leak safety of slice cloning.
    use std::boxed::Box;
    use std::clone::Clone;
    use std::sync::mpsc::channel;
    use std::mem::drop;
    use std::ops::Drop;
    use std::option::Option;
    use std::option::Option::{None, Some};
    use std::sync::atomic;
    use std::sync::atomic::Ordering::{Acquire, SeqCst};
    use std::thread;
    use std::sync::Mutex;
    use std::convert::From;

    use super::{Arc, Weak};
    use vec::Vec;

    // Increments the pointed-to counter when dropped, so tests can observe
    // exactly when the inner value's destructor runs.
    struct Canary(*mut atomic::AtomicUsize);

    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                match *self {
                    Canary(c) => {
                        (*c).fetch_add(1, SeqCst);
                    }
                }
            }
        }
    }

    #[test]
    #[cfg_attr(target_os = "emscripten", ignore)]
    fn manually_share_arc() {
        let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        let arc_v = Arc::new(v);

        let (tx, rx) = channel();

        let _t = thread::spawn(move || {
            let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
            assert_eq!((*arc_v)[3], 4);
        });

        tx.send(arc_v.clone()).unwrap();

        assert_eq!((*arc_v)[2], 3);
        assert_eq!((*arc_v)[4], 5);
    }

    #[test]
    fn test_arc_get_mut() {
        let mut x = Arc::new(3);
        *Arc::get_mut(&mut x).unwrap() = 4;
        assert_eq!(*x, 4);
        let y = x.clone();
        assert!(Arc::get_mut(&mut x).is_none());
        drop(y);
        assert!(Arc::get_mut(&mut x).is_some());
        let _w = Arc::downgrade(&x);
        // A live Weak also blocks `get_mut`.
        assert!(Arc::get_mut(&mut x).is_none());
    }

    #[test]
    fn try_unwrap() {
        let x = Arc::new(3);
        assert_eq!(Arc::try_unwrap(x), Ok(3));
        let x = Arc::new(4);
        let _y = x.clone();
        assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
        let x = Arc::new(5);
        let _w = Arc::downgrade(&x);
        // Weak references do not prevent unwrapping.
        assert_eq!(Arc::try_unwrap(x), Ok(5));
    }

    #[test]
    fn into_from_raw() {
        let x = Arc::new(box "hello");
        let y = x.clone();

        let x_ptr = Arc::into_raw(x);
        drop(y);
        unsafe {
            assert_eq!(**x_ptr, "hello");

            let x = Arc::from_raw(x_ptr);
            assert_eq!(**x, "hello");

            assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello"));
        }
    }

    #[test]
    fn test_into_from_raw_unsized() {
        use std::fmt::Display;
        use std::string::ToString;

        let arc: Arc<str> = Arc::from("foo");

        let ptr = Arc::into_raw(arc.clone());
        let arc2 = unsafe { Arc::from_raw(ptr) };

        assert_eq!(unsafe { &*ptr }, "foo");
        assert_eq!(arc, arc2);

        let arc: Arc<Display> = Arc::new(123);

        let ptr = Arc::into_raw(arc.clone());
        let arc2 = unsafe { Arc::from_raw(ptr) };

        assert_eq!(unsafe { &*ptr }.to_string(), "123");
        assert_eq!(arc2.to_string(), "123");
    }

    #[test]
    fn test_cowarc_clone_make_mut() {
        let mut cow0 = Arc::new(75);
        let mut cow1 = cow0.clone();
        let mut cow2 = cow1.clone();

        assert!(75 == *Arc::make_mut(&mut cow0));
        assert!(75 == *Arc::make_mut(&mut cow1));
        assert!(75 == *Arc::make_mut(&mut cow2));

        *Arc::make_mut(&mut cow0) += 1;
        *Arc::make_mut(&mut cow1) += 2;
        *Arc::make_mut(&mut cow2) += 3;

        assert!(76 == *cow0);
        assert!(77 == *cow1);
        assert!(78 == *cow2);

        // none should point to the same backing memory
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 != *cow2);
    }

    #[test]
    fn test_cowarc_clone_unique2() {
        let mut cow0 = Arc::new(75);
        let cow1 = cow0.clone();
        let cow2 = cow1.clone();

        assert!(75 == *cow0);
        assert!(75 == *cow1);
        assert!(75 == *cow2);

        *Arc::make_mut(&mut cow0) += 1;
        assert!(76 == *cow0);
        assert!(75 == *cow1);
        assert!(75 == *cow2);

        // cow1 and cow2 should share the same contents
        // cow0 should have a unique reference
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 == *cow2);
    }

    #[test]
    fn test_cowarc_clone_weak() {
        let mut cow0 = Arc::new(75);
        let cow1_weak = Arc::downgrade(&cow0);

        assert!(75 == *cow0);
        assert!(75 == *cow1_weak.upgrade().unwrap());

        // `make_mut` on a sole strong ref invalidates outstanding weak refs.
        *Arc::make_mut(&mut cow0) += 1;

        assert!(76 == *cow0);
        assert!(cow1_weak.upgrade().is_none());
    }

    #[test]
    fn test_live() {
        let x = Arc::new(5);
        let y = Arc::downgrade(&x);
        assert!(y.upgrade().is_some());
    }

    #[test]
    fn test_dead() {
        let x = Arc::new(5);
        let y = Arc::downgrade(&x);
        drop(x);
        assert!(y.upgrade().is_none());
    }

    #[test]
    fn weak_self_cyclic() {
        struct Cycle {
            x: Mutex<Option<Weak<Cycle>>>,
        }

        let a = Arc::new(Cycle { x: Mutex::new(None) });
        let b = Arc::downgrade(&a.clone());
        *a.x.lock().unwrap() = Some(b);

        // hopefully we don't double-free (or leak)...
    }

    #[test]
    fn drop_arc() {
        let mut canary = atomic::AtomicUsize::new(0);
        let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
        drop(x);
        assert!(canary.load(Acquire) == 1);
    }

    #[test]
    fn drop_arc_weak() {
        let mut canary = atomic::AtomicUsize::new(0);
        let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
        let arc_weak = Arc::downgrade(&arc);
        assert!(canary.load(Acquire) == 0);
        // Dropping the last strong ref runs `Canary::drop` even while a weak
        // ref is still alive.
        drop(arc);
        assert!(canary.load(Acquire) == 1);
        drop(arc_weak);
    }

    #[test]
    fn test_strong_count() {
        let a = Arc::new(0);
        assert!(Arc::strong_count(&a) == 1);
        let w = Arc::downgrade(&a);
        assert!(Arc::strong_count(&a) == 1);
        let b = w.upgrade().expect("");
        assert!(Arc::strong_count(&b) == 2);
        assert!(Arc::strong_count(&a) == 2);
        drop(w);
        drop(a);
        assert!(Arc::strong_count(&b) == 1);
        let c = b.clone();
        assert!(Arc::strong_count(&b) == 2);
        assert!(Arc::strong_count(&c) == 2);
    }

    #[test]
    fn test_weak_count() {
        let a = Arc::new(0);
        assert!(Arc::strong_count(&a) == 1);
        assert!(Arc::weak_count(&a) == 0);
        let w = Arc::downgrade(&a);
        assert!(Arc::strong_count(&a) == 1);
        assert!(Arc::weak_count(&a) == 1);
        let x = w.clone();
        assert!(Arc::weak_count(&a) == 2);
        drop(w);
        drop(x);
        assert!(Arc::strong_count(&a) == 1);
        assert!(Arc::weak_count(&a) == 0);
        let c = a.clone();
        assert!(Arc::strong_count(&a) == 2);
        assert!(Arc::weak_count(&a) == 0);
        let d = Arc::downgrade(&c);
        assert!(Arc::weak_count(&c) == 1);
        assert!(Arc::strong_count(&c) == 2);

        drop(a);
        drop(c);
        drop(d);
    }

    #[test]
    fn show_arc() {
        let a = Arc::new(5);
        assert_eq!(format!("{:?}", a), "5");
    }

    // Make sure deriving works with Arc<T>
    #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
    struct Foo {
        inner: Arc<i32>,
    }

    #[test]
    fn test_unsized() {
        let x: Arc<[i32]> = Arc::new([1, 2, 3]);
        assert_eq!(format!("{:?}", x), "[1, 2, 3]");
        let y = Arc::downgrade(&x.clone());
        drop(x);
        assert!(y.upgrade().is_none());
    }

    #[test]
    fn test_from_owned() {
        let foo = 123;
        let foo_arc = Arc::from(foo);
        assert!(123 == *foo_arc);
    }

    #[test]
    fn test_new_weak() {
        let foo: Weak<usize> = Weak::new();
        assert!(foo.upgrade().is_none());
    }

    #[test]
    fn test_ptr_eq() {
        let five = Arc::new(5);
        let same_five = five.clone();
        let other_five = Arc::new(5);

        assert!(Arc::ptr_eq(&five, &same_five));
        assert!(!Arc::ptr_eq(&five, &other_five));
    }

    #[test]
    #[cfg_attr(target_os = "emscripten", ignore)]
    fn test_weak_count_locked() {
        // Races `get_mut` (which locks the weak count to usize::MAX) against
        // `weak_count` reads; the lock sentinel must never leak out as a count.
        let mut a = Arc::new(atomic::AtomicBool::new(false));
        let a2 = a.clone();
        let t = thread::spawn(move || {
            for _i in 0..1000000 {
                Arc::get_mut(&mut a);
            }
            a.store(true, SeqCst);
        });

        while !a2.load(SeqCst) {
            let n = Arc::weak_count(&a2);
            assert!(n < 2, "bad weak count: {}", n);
        }
        t.join().unwrap();
    }

    #[test]
    fn test_from_str() {
        let r: Arc<str> = Arc::from("foo");

        assert_eq!(&r[..], "foo");
    }

    #[test]
    fn test_copy_from_slice() {
        let s: &[u32] = &[1, 2, 3];
        let r: Arc<[u32]> = Arc::from(s);

        assert_eq!(&r[..], [1, 2, 3]);
    }

    #[test]
    fn test_clone_from_slice() {
        #[derive(Clone, Debug, Eq, PartialEq)]
        struct X(u32);

        let s: &[X] = &[X(1), X(2), X(3)];
        let r: Arc<[X]> = Arc::from(s);

        assert_eq!(&r[..], s);
    }

    #[test]
    #[should_panic]
    fn test_clone_from_slice_panic() {
        use std::string::{String, ToString};

        struct Fail(u32, String);

        impl Clone for Fail {
            fn clone(&self) -> Fail {
                if self.0 == 2 {
                    panic!();
                }
                Fail(self.0, self.1.clone())
            }
        }

        let s: &[Fail] = &[
            Fail(0, "foo".to_string()),
            Fail(1, "bar".to_string()),
            Fail(2, "baz".to_string()),
        ];

        // Should panic, but not cause memory corruption
        let _r: Arc<[Fail]> = Arc::from(s);
    }

    #[test]
    fn test_from_box() {
        let b: Box<u32> = box 123;
        let r: Arc<u32> = Arc::from(b);

        assert_eq!(*r, 123);
    }

    #[test]
    fn test_from_box_str() {
        use std::string::String;

        let s = String::from("foo").into_boxed_str();
        let r: Arc<str> = Arc::from(s);

        assert_eq!(&r[..], "foo");
    }

    #[test]
    fn test_from_box_slice() {
        let s = vec![1, 2, 3].into_boxed_slice();
        let r: Arc<[u32]> = Arc::from(s);

        assert_eq!(&r[..], [1, 2, 3]);
    }

    #[test]
    fn test_from_box_trait() {
        use std::fmt::Display;
        use std::string::ToString;

        let b: Box<Display> = box 123;
        let r: Arc<Display> = Arc::from(b);

        assert_eq!(r.to_string(), "123");
    }

    #[test]
    fn test_from_box_trait_zero_sized() {
        use std::fmt::Debug;

        let b: Box<Debug> = box ();
        let r: Arc<Debug> = Arc::from(b);

        assert_eq!(format!("{:?}", r), "()");
    }

    #[test]
    fn test_from_vec() {
        let v = vec![1, 2, 3];
        let r: Arc<[u32]> = Arc::from(v);

        assert_eq!(&r[..], [1, 2, 3]);
    }

    #[test]
    fn test_downcast() {
        use std::any::Any;

        let r1: Arc<Any + Send + Sync> = Arc::new(i32::max_value());
        let r2: Arc<Any + Send + Sync> = Arc::new("abc");

        assert!(r1.clone().downcast::<u32>().is_err());

        let r1i32 = r1.downcast::<i32>();
        assert!(r1i32.is_ok());
        assert_eq!(r1i32.unwrap(), Arc::new(i32::max_value()));

        assert!(r2.clone().downcast::<i32>().is_err());

        let r2str = r2.downcast::<&'static str>();
        assert!(r2str.is_ok());
        assert_eq!(r2str.unwrap(), Arc::new("abc"));
    }
}
e9174d1e 1907
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
    /// Returns a reference to the inner value (allows e.g. looking up an
    /// `Arc<str>` map key with a plain `&str`).
    fn borrow(&self) -> &T {
        &**self
    }
}
1914
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized> AsRef<T> for Arc<T> {
    /// Returns a reference to the inner value.
    fn as_ref(&self) -> &T {
        &**self
    }
}