]> git.proxmox.com Git - rustc.git/blame - src/liballoc/arc.rs
New upstream version 1.26.2+dfsg1
[rustc.git] / src / liballoc / arc.rs
CommitLineData
1a4d82fc
JJ
1// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
2// file at the top-level directory of this distribution and at
3// http://rust-lang.org/COPYRIGHT.
4//
5// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8// option. This file may not be copied, modified, or distributed
9// except according to those terms.
10
85aaf69f 11#![stable(feature = "rust1", since = "1.0.0")]
1a4d82fc 12
c30ab7b3 13//! Thread-safe reference-counting pointers.
1a4d82fc 14//!
c30ab7b3 15//! See the [`Arc<T>`][arc] documentation for more details.
1a4d82fc 16//!
c30ab7b3 17//! [arc]: struct.Arc.html
1a4d82fc 18
e9174d1e 19use core::sync::atomic;
3157f602 20use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
e9174d1e 21use core::borrow;
85aaf69f 22use core::fmt;
c34b1796 23use core::cmp::Ordering;
0531ce1d 24use core::heap::{Alloc, Layout};
92a42be0 25use core::intrinsics::abort;
ea8adc8c 26use core::mem::{self, align_of_val, size_of_val, uninitialized};
92a42be0 27use core::ops::Deref;
92a42be0 28use core::ops::CoerceUnsized;
2c00a5a8 29use core::ptr::{self, NonNull};
ff7c6d11 30use core::marker::{Unsize, PhantomData};
1a4d82fc 31use core::hash::{Hash, Hasher};
3157f602 32use core::{isize, usize};
92a42be0 33use core::convert::From;
041b39d2 34
0531ce1d 35use heap::{Heap, box_free};
3b2f2976
XL
36use boxed::Box;
37use string::String;
38use vec::Vec;
1a4d82fc 39
c30ab7b3
SL
/// A soft limit on the amount of references that may be made to an `Arc`.
///
/// Going above this limit will abort your program (although not
/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
// The cap is `isize::MAX` (half the `usize` range) so that `clone`'s racy
// overflow check has plenty of headroom before the counter can actually wrap.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
041b39d2
XL
/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
/// Reference Counted'.
///
/// The type `Arc<T>` provides shared ownership of a value of type `T`,
/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
/// a new pointer to the same value in the heap. When the last `Arc`
/// pointer to a given value is destroyed, the pointed-to value is
/// also destroyed.
///
/// Shared references in Rust disallow mutation by default, and `Arc` is no
/// exception: you cannot generally obtain a mutable reference to something
/// inside an `Arc`. If you need to mutate through an `Arc`, use
/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
/// types.
///
/// ## Thread Safety
///
/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
/// counting. This means that it is thread-safe. The disadvantage is that
/// atomic operations are more expensive than ordinary memory accesses. If you
/// are not sharing reference-counted values between threads, consider using
/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
/// However, a library might choose `Arc<T>` in order to give library consumers
/// more flexibility.
///
/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
/// data, but it doesn't add thread safety to its data. Consider
/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
/// non-atomic operations.
///
/// In the end, this means that you may need to pair `Arc<T>` with some sort of
/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
///
/// ## Breaking cycles with `Weak`
///
/// The [`downgrade`][downgrade] method can be used to create a non-owning
/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
/// to an `Arc`, but this will return [`None`] if the value has already been
/// dropped.
///
/// A cycle between `Arc` pointers will never be deallocated. For this reason,
/// [`Weak`][weak] is used to break cycles. For example, a tree could have
/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
/// pointers from children back to their parents.
///
/// # Cloning references
///
/// Creating a new reference from an existing reference counted pointer is done using the
/// `Clone` trait implemented for [`Arc<T>`][arc] and [`Weak<T>`][weak].
///
/// ```
/// use std::sync::Arc;
/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
/// // The two syntaxes below are equivalent.
/// let a = foo.clone();
/// let b = Arc::clone(&foo);
/// // a and b both point to the same memory location as foo.
/// ```
///
/// The [`Arc::clone(&from)`] syntax is the most idiomatic because it conveys more explicitly
/// the meaning of the code. In the example above, this syntax makes it easier to see that
/// this code is creating a new reference rather than copying the whole content of foo.
///
/// ## `Deref` behavior
///
/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
/// clashes with `T`'s methods, the methods of `Arc<T>` itself are [associated
/// functions][assoc], called using function-like syntax:
///
/// ```
/// use std::sync::Arc;
/// let my_arc = Arc::new(());
///
/// Arc::downgrade(&my_arc);
/// ```
///
/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the value may have
/// already been destroyed.
///
/// [arc]: struct.Arc.html
/// [weak]: struct.Weak.html
/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
/// [mutex]: ../../std/sync/struct.Mutex.html
/// [rwlock]: ../../std/sync/struct.RwLock.html
/// [atomic]: ../../std/sync/atomic/index.html
/// [`Send`]: ../../std/marker/trait.Send.html
/// [`Sync`]: ../../std/marker/trait.Sync.html
/// [deref]: ../../std/ops/trait.Deref.html
/// [downgrade]: struct.Arc.html#method.downgrade
/// [upgrade]: struct.Weak.html#method.upgrade
/// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [assoc]: ../../book/first-edition/method-syntax.html#associated-functions
/// [`RefCell<T>`]: ../../std/cell/struct.RefCell.html
/// [`std::sync`]: ../../std/sync/index.html
/// [`Arc::clone(&from)`]: #method.clone
///
/// # Examples
///
/// Sharing some immutable data between threads:
///
// Note that we **do not** run these tests here. The windows builders get super
// unhappy if a thread outlives the main thread and then exits at the same time
// (something deadlocks) so we just avoid this entirely by not running these
// tests.
/// ```no_run
/// use std::sync::Arc;
/// use std::thread;
///
/// let five = Arc::new(5);
///
/// for _ in 0..10 {
///     let five = Arc::clone(&five);
///
///     thread::spawn(move || {
///         println!("{:?}", five);
///     });
/// }
/// ```
///
/// Sharing a mutable [`AtomicUsize`]:
///
/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html
///
/// ```no_run
/// use std::sync::Arc;
/// use std::sync::atomic::{AtomicUsize, Ordering};
/// use std::thread;
///
/// let val = Arc::new(AtomicUsize::new(5));
///
/// for _ in 0..10 {
///     let val = Arc::clone(&val);
///
///     thread::spawn(move || {
///         let v = val.fetch_add(1, Ordering::SeqCst);
///         println!("{:?}", v);
///     });
/// }
/// ```
///
/// See the [`rc` documentation][rc_examples] for more examples of reference
/// counting in general.
///
/// [rc_examples]: ../../std/rc/index.html#examples
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
    // Shared heap allocation holding the refcounts and the data.
    // `NonNull` gives `Option<Arc<T>>` the null-pointer niche for free.
    ptr: NonNull<ArcInner<T>>,
    // `PhantomData<T>` tells the drop checker this type may own a `T`.
    phantom: PhantomData<T>,
}
204
92a42be0
SL
// SAFETY: sending/sharing an `Arc<T>` hands out access to the shared `T` from
// multiple threads, so both markers require `T: Sync + Send`.
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
1a4d82fc 209
// Allow unsizing coercions such as `Arc<[T; N]> -> Arc<[T]>` and
// `Arc<Concrete> -> Arc<dyn Trait>`.
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
1a4d82fc 212
cc61c64b
XL
/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
/// managed value. The value is accessed by calling [`upgrade`] on the `Weak`
/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
///
/// Since a `Weak` reference does not count towards ownership, it will not
/// prevent the inner value from being dropped, and `Weak` itself makes no
/// guarantees about the value still being present and may return [`None`]
/// when [`upgrade`]d.
///
/// A `Weak` pointer is useful for keeping a temporary reference to the value
/// within [`Arc`] without extending its lifetime. It is also used to prevent
/// circular references between [`Arc`] pointers, since mutual owning references
/// would never allow either [`Arc`] to be dropped. For example, a tree could
/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
/// pointers from children back to their parents.
///
/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
///
/// [`Arc`]: struct.Arc.html
/// [`Arc::downgrade`]: struct.Arc.html#method.downgrade
/// [`upgrade`]: struct.Weak.html#method.upgrade
/// [`Option`]: ../../std/option/enum.Option.html
/// [`None`]: ../../std/option/enum.Option.html#variant.None
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
    // Points at the same `ArcInner` as the strong pointers; a `Weak` keeps
    // the allocation alive (via the weak count) but not the data itself.
    ptr: NonNull<ArcInner<T>>,
}
240
// SAFETY: a `Weak<T>` can be upgraded to an `Arc<T>` on another thread, so it
// needs the same `T: Sync + Send` bounds as `Arc<T>` itself.
#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
1a4d82fc 245
// Mirror of the `Arc` unsizing coercion, e.g. `Weak<[T; N]> -> Weak<[T]>`.
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
7453a54e 249#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 250impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
c34b1796
AL
251 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
252 write!(f, "(Weak)")
253 }
254}
255
// The heap allocation shared by all `Arc`/`Weak` handles to one value:
// both reference counts followed by the data (last, so `T` may be unsized).
struct ArcInner<T: ?Sized> {
    strong: atomic::AtomicUsize,

    // the value usize::MAX acts as a sentinel for temporarily "locking" the
    // ability to upgrade weak pointers or downgrade strong ones; this is used
    // to avoid races in `make_mut` and `get_mut`.
    weak: atomic::AtomicUsize,

    data: T,
}
266
62682a34
SL
// SAFETY: the counts are atomics and `data` is only shared when `T: Sync`,
// so the whole inner block is safe to send/share across threads.
unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
1a4d82fc
JJ
269
impl<T> Arc<T> {
    /// Constructs a new `Arc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(data: T) -> Arc<T> {
        // Start the weak pointer count as 1 which is the weak pointer that's
        // held by all the strong pointers (kinda), see std/rc.rs for more info
        let x: Box<_> = box ArcInner {
            strong: atomic::AtomicUsize::new(1),
            weak: atomic::AtomicUsize::new(1),
            data,
        };
        Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData }
    }

    /// Returns the contained value, if the `Arc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// [result]: ../../std/result/enum.Result.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new(3);
    /// assert_eq!(Arc::try_unwrap(x), Ok(3));
    ///
    /// let x = Arc::new(4);
    /// let _y = Arc::clone(&x);
    /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        // See `drop` for why all these atomics are like this.
        // CAS the strong count 1 -> 0; failure means another strong handle
        // exists, so hand the `Arc` back unchanged.
        if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
            return Err(this);
        }

        atomic::fence(Acquire);

        unsafe {
            // Move the value out; the allocation may outlive it if weak
            // pointers remain.
            let elem = ptr::read(&this.ptr.as_ref().data);

            // Make a weak pointer to clean up the implicit strong-weak reference
            let _weak = Weak { ptr: this.ptr };
            // Forget `this` so its Drop doesn't touch the now-moved data.
            mem::forget(this);

            Ok(elem)
        }
    }
}
476ff2be 335
impl<T: ?Sized> Arc<T> {
    /// Consumes the `Arc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Arc` using
    /// [`Arc::from_raw`][from_raw].
    ///
    /// [from_raw]: struct.Arc.html#method.from_raw
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new(10);
    /// let x_ptr = Arc::into_raw(x);
    /// assert_eq!(unsafe { *x_ptr }, 10);
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub fn into_raw(this: Self) -> *const T {
        // Deref to the data field, then leak the handle so the strong count
        // stays at least 1 until `from_raw` reconstitutes it.
        let ptr: *const T = &*this;
        mem::forget(this);
        ptr
    }

    /// Constructs an `Arc` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to a
    /// [`Arc::into_raw`][into_raw].
    ///
    /// This function is unsafe because improper use may lead to memory problems. For example, a
    /// double-free may occur if the function is called twice on the same raw pointer.
    ///
    /// [into_raw]: struct.Arc.html#method.into_raw
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new(10);
    /// let x_ptr = Arc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Arc` to prevent leak.
    ///     let x = Arc::from_raw(x_ptr);
    ///     assert_eq!(*x, 10);
    ///
    ///     // Further calls to `Arc::from_raw(x_ptr)` would be memory unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        // Align the unsized value to the end of the ArcInner.
        // Because it is ?Sized, it will always be the last field in memory.
        let align = align_of_val(&*ptr);
        let layout = Layout::new::<ArcInner<()>>();
        let offset = (layout.size() + layout.padding_needed_for(align)) as isize;

        // Reverse the offset to find the original ArcInner.
        let fake_ptr = ptr as *mut ArcInner<T>;
        let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));

        Arc {
            ptr: NonNull::new_unchecked(arc_ptr),
            phantom: PhantomData,
        }
    }

    /// Creates a new [`Weak`][weak] pointer to this value.
    ///
    /// [weak]: struct.Weak.html
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let weak_five = Arc::downgrade(&five);
    /// ```
    #[stable(feature = "arc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T> {
        // This Relaxed is OK because we're checking the value in the CAS
        // below.
        let mut cur = this.inner().weak.load(Relaxed);

        loop {
            // check if the weak counter is currently "locked"; if so, spin.
            if cur == usize::MAX {
                cur = this.inner().weak.load(Relaxed);
                continue;
            }

            // NOTE: this code currently ignores the possibility of overflow
            // into usize::MAX; in general both Rc and Arc need to be adjusted
            // to deal with overflow.

            // Unlike with Clone(), we need this to be an Acquire read to
            // synchronize with the write coming from `is_unique`, so that the
            // events prior to that write happen before this read.
            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
                Ok(_) => return Weak { ptr: this.ptr },
                Err(old) => cur = old,
            }
        }
    }

    /// Gets the number of [`Weak`][weak] pointers to this value.
    ///
    /// [weak]: struct.Weak.html
    ///
    /// # Safety
    ///
    /// This method by itself is safe, but using it correctly requires extra care.
    /// Another thread can change the weak count at any time,
    /// including potentially between calling this method and acting on the result.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _weak_five = Arc::downgrade(&five);
    ///
    /// // This assertion is deterministic because we haven't shared
    /// // the `Arc` or `Weak` between threads.
    /// assert_eq!(1, Arc::weak_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "arc_counts", since = "1.15.0")]
    pub fn weak_count(this: &Self) -> usize {
        let cnt = this.inner().weak.load(SeqCst);
        // If the weak count is currently locked, the value of the
        // count was 0 just before taking the lock.
        // (The `- 1` hides the implicit weak held jointly by the strong refs.)
        if cnt == usize::MAX { 0 } else { cnt - 1 }
    }

    /// Gets the number of strong (`Arc`) pointers to this value.
    ///
    /// # Safety
    ///
    /// This method by itself is safe, but using it correctly requires extra care.
    /// Another thread can change the strong count at any time,
    /// including potentially between calling this method and acting on the result.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _also_five = Arc::clone(&five);
    ///
    /// // This assertion is deterministic because we haven't shared
    /// // the `Arc` between threads.
    /// assert_eq!(2, Arc::strong_count(&five));
    /// ```
    #[inline]
    #[stable(feature = "arc_counts", since = "1.15.0")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong.load(SeqCst)
    }

    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // This unsafety is ok because while this arc is alive we're guaranteed
        // that the inner pointer is valid. Furthermore, we know that the
        // `ArcInner` structure itself is `Sync` because the inner data is
        // `Sync` as well, so we're ok loaning out an immutable pointer to these
        // contents.
        unsafe { self.ptr.as_ref() }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        let ptr = self.ptr.as_ptr();

        // Destroy the data at this time, even though we may not free the box
        // allocation itself (there may still be weak pointers lying around).
        ptr::drop_in_place(&mut self.ptr.as_mut().data);

        // Drop the implicit weak reference; if it was the last one, free the
        // allocation too.
        if self.inner().weak.fetch_sub(1, Release) == 1 {
            atomic::fence(Acquire);
            Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
        }
    }

    #[inline]
    #[stable(feature = "ptr_eq", since = "1.17.0")]
    /// Returns true if the two `Arc`s point to the same value (not
    /// just values that compare as equal).
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let same_five = Arc::clone(&five);
    /// let other_five = Arc::new(5);
    ///
    /// assert!(Arc::ptr_eq(&five, &same_five));
    /// assert!(!Arc::ptr_eq(&five, &other_five));
    /// ```
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.ptr.as_ptr() == other.ptr.as_ptr()
    }
}
549
3b2f2976
XL
impl<T: ?Sized> Arc<T> {
    // Allocates an `ArcInner<T>` with sufficient space for an unsized value.
    // The returned memory has initialized counts but UNINITIALIZED data.
    unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
        // Create a fake ArcInner to find allocation size and alignment
        let fake_ptr = ptr as *mut ArcInner<T>;

        let layout = Layout::for_value(&*fake_ptr);

        let mem = Heap.alloc(layout)
            .unwrap_or_else(|e| Heap.oom(e));

        // Initialize the real ArcInner
        let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;

        ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
        ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));

        inner
    }

    // Moves a (possibly unsized) boxed value into a fresh `Arc` allocation.
    fn from_box(v: Box<T>) -> Arc<T> {
        unsafe {
            let bptr = Box::into_raw(v);

            let value_size = size_of_val(&*bptr);
            let ptr = Self::allocate_for_ptr(bptr);

            // Copy value as bytes
            ptr::copy_nonoverlapping(
                bptr as *const T as *const u8,
                &mut (*ptr).data as *mut _ as *mut u8,
                value_size);

            // Free the allocation without dropping its contents
            box_free(bptr);

            Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
        }
    }
}
590
// Sets the data pointer of a `?Sized` raw pointer.
//
// For a slice/trait object, this sets the `data` field and leaves the rest
// unchanged. For a sized raw pointer, this simply sets the pointer.
// (Overwrites only the first word of the fat pointer; the metadata word, if
// any, is preserved.)
unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
    ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
    ptr
}
599
impl<T> Arc<[T]> {
    // Copy elements from slice into newly allocated Arc<[T]>
    //
    // Unsafe because the caller must either take ownership or bind `T: Copy`
    // (a bitwise copy duplicates ownership of non-Copy elements otherwise).
    unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
        let v_ptr = v as *const [T];
        let ptr = Self::allocate_for_ptr(v_ptr);

        ptr::copy_nonoverlapping(
            v.as_ptr(),
            &mut (*ptr).data as *mut [T] as *mut T,
            v.len());

        Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
    }
}
616
// Specialization trait used for From<&[T]>:
// the default impl clones element-by-element, the `T: Copy` impl memcpys.
trait ArcFromSlice<T> {
    fn from_slice(slice: &[T]) -> Self;
}
621
impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    default fn from_slice(v: &[T]) -> Self {
        // Panic guard while cloning T elements.
        // In the event of a panic, elements that have been written
        // into the new ArcInner will be dropped, then the memory freed.
        struct Guard<T> {
            mem: *mut u8,
            elems: *mut T,
            layout: Layout,
            n_elems: usize,
        }

        impl<T> Drop for Guard<T> {
            fn drop(&mut self) {
                use core::slice::from_raw_parts_mut;

                unsafe {
                    // Drop only the elements cloned so far, then free the
                    // whole allocation.
                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
                    ptr::drop_in_place(slice);

                    Heap.dealloc(self.mem, self.layout.clone());
                }
            }
        }

        unsafe {
            let v_ptr = v as *const [T];
            let ptr = Self::allocate_for_ptr(v_ptr);

            let mem = ptr as *mut _ as *mut u8;
            let layout = Layout::for_value(&*ptr);

            // Pointer to first element
            let elems = &mut (*ptr).data as *mut [T] as *mut T;

            let mut guard = Guard{
                mem: mem,
                elems: elems,
                layout: layout,
                n_elems: 0,
            };

            // Clone each element; `n_elems` tracks progress so the guard
            // knows how many to drop if `item.clone()` panics.
            for (i, item) in v.iter().enumerate() {
                ptr::write(elems.offset(i as isize), item.clone());
                guard.n_elems += 1;
            }

            // All clear. Forget the guard so it doesn't free the new ArcInner.
            mem::forget(guard);

            Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
        }
    }
}
677
impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
    #[inline]
    fn from_slice(v: &[T]) -> Self {
        // Safe because `T: Copy` makes the bitwise copy a true duplicate.
        unsafe { Arc::copy_from_slice(v) }
    }
}
684
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Arc<T> {
    /// Makes a clone of the `Arc` pointer.
    ///
    /// This creates another pointer to the same inner value, increasing the
    /// strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// Arc::clone(&five);
    /// ```
    #[inline]
    fn clone(&self) -> Arc<T> {
        // Using a relaxed ordering is alright here, as knowledge of the
        // original reference prevents other threads from erroneously deleting
        // the object.
        //
        // As explained in the [Boost documentation][1], Increasing the
        // reference counter can always be done with memory_order_relaxed: New
        // references to an object can only be formed from an existing
        // reference, and passing an existing reference from one thread to
        // another must already provide any required synchronization.
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        let old_size = self.inner().strong.fetch_add(1, Relaxed);

        // However we need to guard against massive refcounts in case someone
        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
        // and users will use-after free. We racily saturate to `isize::MAX` on
        // the assumption that there aren't ~2 billion threads incrementing
        // the reference count at once. This branch will never be taken in
        // any realistic program.
        //
        // We abort because such a program is incredibly degenerate, and we
        // don't care to support it.
        if old_size > MAX_REFCOUNT {
            unsafe {
                abort();
            }
        }

        Arc { ptr: self.ptr, phantom: PhantomData }
    }
}
734
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;

    // Borrow the shared data; valid for as long as this `Arc` is alive.
    #[inline]
    fn deref(&self) -> &T {
        &self.inner().data
    }
}
744
impl<T: Clone> Arc<T> {
    /// Makes a mutable reference into the given `Arc`.
    ///
    /// If there are other `Arc` or [`Weak`][weak] pointers to the same value,
    /// then `make_mut` will invoke [`clone`][clone] on the inner value to
    /// ensure unique ownership. This is also referred to as clone-on-write.
    ///
    /// See also [`get_mut`][get_mut], which will fail rather than cloning.
    ///
    /// [weak]: struct.Weak.html
    /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
    /// [get_mut]: struct.Arc.html#method.get_mut
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut data = Arc::new(5);
    ///
    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
    /// let mut other_data = Arc::clone(&data); // Won't clone inner data
    /// *Arc::make_mut(&mut data) += 1;         // Clones inner data
    /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
    /// *Arc::make_mut(&mut other_data) *= 2;   // Won't clone anything
    ///
    /// // Now `data` and `other_data` point to different values.
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        // Note that we hold both a strong reference and a weak reference.
        // Thus, releasing our strong reference only will not, by itself, cause
        // the memory to be deallocated.
        //
        // Use Acquire to ensure that we see any writes to `weak` that happen
        // before release writes (i.e., decrements) to `strong`. Since we hold a
        // weak count, there's no chance the ArcInner itself could be
        // deallocated.
        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
            // Another strong pointer exists; clone
            *this = Arc::new((**this).clone());
        } else if this.inner().weak.load(Relaxed) != 1 {
            // Relaxed suffices in the above because this is fundamentally an
            // optimization: we are always racing with weak pointers being
            // dropped. Worst case, we end up allocated a new Arc unnecessarily.

            // We removed the last strong ref, but there are additional weak
            // refs remaining. We'll move the contents to a new Arc, and
            // invalidate the other weak refs.

            // Note that it is not possible for the read of `weak` to yield
            // usize::MAX (i.e., locked), since the weak count can only be
            // locked by a thread with a strong reference.

            // Materialize our own implicit weak pointer, so that it can clean
            // up the ArcInner as needed.
            let weak = Weak { ptr: this.ptr };

            // mark the data itself as already deallocated
            unsafe {
                // there is no data race in the implicit write caused by `read`
                // here (due to zeroing) because data is no longer accessed by
                // other threads (due to there being no more strong refs at this
                // point).
                let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
                mem::swap(this, &mut swap);
                mem::forget(swap);
            }
        } else {
            // We were the sole reference of either kind; bump back up the
            // strong ref count.
            this.inner().strong.store(1, Release);
        }

        // As with `get_mut()`, the unsafety is ok because our reference was
        // either unique to begin with, or became one upon cloning the contents.
        unsafe {
            &mut this.ptr.as_mut().data
        }
    }
}
829
c1a9b12d 830impl<T: ?Sized> Arc<T> {
c30ab7b3
SL
831 /// Returns a mutable reference to the inner value, if there are
832 /// no other `Arc` or [`Weak`][weak] pointers to the same value.
833 ///
834 /// Returns [`None`][option] otherwise, because it is not safe to
835 /// mutate a shared value.
836 ///
837 /// See also [`make_mut`][make_mut], which will [`clone`][clone]
838 /// the inner value when it's shared.
839 ///
840 /// [weak]: struct.Weak.html
841 /// [option]: ../../std/option/enum.Option.html
842 /// [make_mut]: struct.Arc.html#method.make_mut
843 /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
c1a9b12d
SL
844 ///
845 /// # Examples
846 ///
847 /// ```
e9174d1e 848 /// use std::sync::Arc;
c1a9b12d
SL
849 ///
850 /// let mut x = Arc::new(3);
851 /// *Arc::get_mut(&mut x).unwrap() = 4;
852 /// assert_eq!(*x, 4);
853 ///
7cac9316 854 /// let _y = Arc::clone(&x);
c1a9b12d 855 /// assert!(Arc::get_mut(&mut x).is_none());
c1a9b12d
SL
856 /// ```
857 #[inline]
e9174d1e
SL
858 #[stable(feature = "arc_unique", since = "1.4.0")]
859 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
c1a9b12d
SL
860 if this.is_unique() {
861 // This unsafety is ok because we're guaranteed that the pointer
862 // returned is the *only* pointer that will ever be returned to T. Our
863 // reference count is guaranteed to be 1 at this point, and we required
864 // the Arc itself to be `mut`, so we're returning the only possible
865 // reference to the inner data.
866 unsafe {
7cac9316 867 Some(&mut this.ptr.as_mut().data)
c1a9b12d
SL
868 }
869 } else {
870 None
871 }
872 }
873
874 /// Determine whether this is the unique reference (including weak refs) to
875 /// the underlying data.
876 ///
877 /// Note that this requires locking the weak ref count.
878 fn is_unique(&mut self) -> bool {
879 // lock the weak pointer count if we appear to be the sole weak pointer
880 // holder.
881 //
882 // The acquire label here ensures a happens-before relationship with any
883 // writes to `strong` prior to decrements of the `weak` count (via drop,
884 // which uses Release).
54a0048b 885 if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
c1a9b12d
SL
886 // Due to the previous acquire read, this will observe any writes to
887 // `strong` that were due to upgrading weak pointers; only strong
888 // clones remain, which require that the strong count is > 1 anyway.
889 let unique = self.inner().strong.load(Relaxed) == 1;
890
891 // The release write here synchronizes with a read in `downgrade`,
892 // effectively preventing the above read of `strong` from happening
893 // after the write.
894 self.inner().weak.store(1, Release); // release the lock
895 unique
896 } else {
897 false
898 }
899 }
900}
901
85aaf69f 902#[stable(feature = "rust1", since = "1.0.0")]
32a655c1 903unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
c30ab7b3 904 /// Drops the `Arc`.
1a4d82fc 905 ///
c34b1796 906 /// This will decrement the strong reference count. If the strong reference
c30ab7b3
SL
907 /// count reaches zero then the only other references (if any) are
908 /// [`Weak`][weak], so we `drop` the inner value.
909 ///
910 /// [weak]: struct.Weak.html
1a4d82fc
JJ
911 ///
912 /// # Examples
913 ///
914 /// ```
915 /// use std::sync::Arc;
916 ///
c30ab7b3 917 /// struct Foo;
1a4d82fc 918 ///
c30ab7b3
SL
919 /// impl Drop for Foo {
920 /// fn drop(&mut self) {
921 /// println!("dropped!");
922 /// }
1a4d82fc 923 /// }
1a4d82fc 924 ///
c30ab7b3 925 /// let foo = Arc::new(Foo);
7cac9316 926 /// let foo2 = Arc::clone(&foo);
1a4d82fc 927 ///
c30ab7b3
SL
928 /// drop(foo); // Doesn't print anything
929 /// drop(foo2); // Prints "dropped!"
1a4d82fc 930 /// ```
c34b1796 931 #[inline]
1a4d82fc 932 fn drop(&mut self) {
c34b1796
AL
933 // Because `fetch_sub` is already atomic, we do not need to synchronize
934 // with other threads unless we are going to delete the object. This
935 // same logic applies to the below `fetch_sub` to the `weak` count.
b039eaaf 936 if self.inner().strong.fetch_sub(1, Release) != 1 {
92a42be0 937 return;
b039eaaf 938 }
1a4d82fc 939
c34b1796
AL
940 // This fence is needed to prevent reordering of use of the data and
941 // deletion of the data. Because it is marked `Release`, the decreasing
942 // of the reference count synchronizes with this `Acquire` fence. This
943 // means that use of the data happens before decreasing the reference
944 // count, which happens before this fence, which happens before the
945 // deletion of the data.
1a4d82fc
JJ
946 //
947 // As explained in the [Boost documentation][1],
948 //
c34b1796
AL
949 // > It is important to enforce any possible access to the object in one
950 // > thread (through an existing reference) to *happen before* deleting
951 // > the object in a different thread. This is achieved by a "release"
952 // > operation after dropping a reference (any access to the object
953 // > through this reference must obviously happened before), and an
954 // > "acquire" operation before deleting the object.
1a4d82fc 955 //
7cac9316
XL
956 // In particular, while the contents of an Arc are usually immutable, it's
957 // possible to have interior writes to something like a Mutex<T>. Since a
958 // Mutex is not acquired when it is deleted, we can't rely on its
959 // synchronization logic to make writes in thread A visible to a destructor
960 // running in thread B.
961 //
962 // Also note that the Acquire fence here could probably be replaced with an
963 // Acquire load, which could improve performance in highly-contended
964 // situations. See [2].
965 //
1a4d82fc 966 // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
7cac9316 967 // [2]: (https://github.com/rust-lang/rust/pull/41714)
1a4d82fc
JJ
968 atomic::fence(Acquire);
969
c34b1796 970 unsafe {
b039eaaf 971 self.drop_slow();
1a4d82fc
JJ
972 }
973 }
974}
975
a7813a04 976impl<T> Weak<T> {
cc61c64b
XL
977 /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
978 /// it. Calling [`upgrade`] on the return value always gives [`None`].
c30ab7b3 979 ///
cc61c64b
XL
980 /// [`upgrade`]: struct.Weak.html#method.upgrade
981 /// [`None`]: ../../std/option/enum.Option.html#variant.None
a7813a04
XL
982 ///
983 /// # Examples
984 ///
985 /// ```
986 /// use std::sync::Weak;
987 ///
988 /// let empty: Weak<i64> = Weak::new();
c30ab7b3 989 /// assert!(empty.upgrade().is_none());
a7813a04
XL
990 /// ```
991 #[stable(feature = "downgraded_weak", since = "1.10.0")]
992 pub fn new() -> Weak<T> {
993 unsafe {
3157f602 994 Weak {
2c00a5a8 995 ptr: Box::into_raw_non_null(box ArcInner {
3157f602
XL
996 strong: atomic::AtomicUsize::new(0),
997 weak: atomic::AtomicUsize::new(1),
998 data: uninitialized(),
2c00a5a8 999 }),
3157f602 1000 }
a7813a04
XL
1001 }
1002 }
1003}
1004
62682a34 1005impl<T: ?Sized> Weak<T> {
cc61c64b
XL
1006 /// Attempts to upgrade the `Weak` pointer to an [`Arc`], extending
1007 /// the lifetime of the value if successful.
1a4d82fc 1008 ///
cc61c64b 1009 /// Returns [`None`] if the value has since been dropped.
1a4d82fc 1010 ///
cc61c64b
XL
1011 /// [`Arc`]: struct.Arc.html
1012 /// [`None`]: ../../std/option/enum.Option.html#variant.None
1a4d82fc
JJ
1013 ///
1014 /// # Examples
1015 ///
1016 /// ```
1017 /// use std::sync::Arc;
1018 ///
85aaf69f 1019 /// let five = Arc::new(5);
1a4d82fc 1020 ///
e9174d1e 1021 /// let weak_five = Arc::downgrade(&five);
1a4d82fc
JJ
1022 ///
1023 /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
c30ab7b3
SL
1024 /// assert!(strong_five.is_some());
1025 ///
1026 /// // Destroy all strong pointers.
1027 /// drop(strong_five);
1028 /// drop(five);
1029 ///
1030 /// assert!(weak_five.upgrade().is_none());
1a4d82fc 1031 /// ```
e9174d1e 1032 #[stable(feature = "arc_weak", since = "1.4.0")]
1a4d82fc 1033 pub fn upgrade(&self) -> Option<Arc<T>> {
c34b1796 1034 // We use a CAS loop to increment the strong count instead of a
9346a6ac 1035 // fetch_add because once the count hits 0 it must never be above 0.
1a4d82fc 1036 let inner = self.inner();
54a0048b
SL
1037
1038 // Relaxed load because any write of 0 that we can observe
1039 // leaves the field in a permanently zero state (so a
1040 // "stale" read of 0 is fine), and any other value is
1041 // confirmed via the CAS below.
1042 let mut n = inner.strong.load(Relaxed);
1043
1a4d82fc 1044 loop {
b039eaaf 1045 if n == 0 {
92a42be0
SL
1046 return None;
1047 }
1048
1049 // See comments in `Arc::clone` for why we do this (for `mem::forget`).
1050 if n > MAX_REFCOUNT {
3157f602
XL
1051 unsafe {
1052 abort();
1053 }
b039eaaf 1054 }
c1a9b12d
SL
1055
1056 // Relaxed is valid for the same reason it is on Arc's Clone impl
54a0048b 1057 match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
ff7c6d11 1058 Ok(_) => return Some(Arc { ptr: self.ptr, phantom: PhantomData }),
54a0048b 1059 Err(old) => n = old,
b039eaaf 1060 }
1a4d82fc
JJ
1061 }
1062 }
1063
1064 #[inline]
1065 fn inner(&self) -> &ArcInner<T> {
1066 // See comments above for why this is "safe"
7cac9316 1067 unsafe { self.ptr.as_ref() }
1a4d82fc
JJ
1068 }
1069}
1070
e9174d1e 1071#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 1072impl<T: ?Sized> Clone for Weak<T> {
cc61c64b 1073 /// Makes a clone of the `Weak` pointer that points to the same value.
1a4d82fc
JJ
1074 ///
1075 /// # Examples
1076 ///
1077 /// ```
7cac9316 1078 /// use std::sync::{Arc, Weak};
1a4d82fc 1079 ///
e9174d1e 1080 /// let weak_five = Arc::downgrade(&Arc::new(5));
1a4d82fc 1081 ///
7cac9316 1082 /// Weak::clone(&weak_five);
1a4d82fc
JJ
1083 /// ```
1084 #[inline]
1085 fn clone(&self) -> Weak<T> {
c1a9b12d
SL
1086 // See comments in Arc::clone() for why this is relaxed. This can use a
1087 // fetch_add (ignoring the lock) because the weak count is only locked
1088 // where are *no other* weak pointers in existence. (So we can't be
1089 // running this code in that case).
1090 let old_size = self.inner().weak.fetch_add(1, Relaxed);
1091
1092 // See comments in Arc::clone() for why we do this (for mem::forget).
1093 if old_size > MAX_REFCOUNT {
b039eaaf
SL
1094 unsafe {
1095 abort();
1096 }
c1a9b12d
SL
1097 }
1098
54a0048b 1099 return Weak { ptr: self.ptr };
1a4d82fc
JJ
1100 }
1101}
1102
#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, allocating memory for `T` without initializing
    /// it. Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`upgrade`]: struct.Weak.html#method.upgrade
    /// [`None`]: ../../std/option/enum.Option.html#variant.None
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        // Simple delegation to the dangling-Weak constructor above.
        Weak::new()
    }
}
1123
7453a54e 1124#[stable(feature = "arc_weak", since = "1.4.0")]
62682a34 1125impl<T: ?Sized> Drop for Weak<T> {
c30ab7b3 1126 /// Drops the `Weak` pointer.
1a4d82fc 1127 ///
1a4d82fc
JJ
1128 /// # Examples
1129 ///
1130 /// ```
7cac9316 1131 /// use std::sync::{Arc, Weak};
1a4d82fc 1132 ///
c30ab7b3 1133 /// struct Foo;
1a4d82fc 1134 ///
c30ab7b3
SL
1135 /// impl Drop for Foo {
1136 /// fn drop(&mut self) {
1137 /// println!("dropped!");
1138 /// }
1a4d82fc 1139 /// }
1a4d82fc 1140 ///
c30ab7b3
SL
1141 /// let foo = Arc::new(Foo);
1142 /// let weak_foo = Arc::downgrade(&foo);
7cac9316 1143 /// let other_weak_foo = Weak::clone(&weak_foo);
1a4d82fc 1144 ///
c30ab7b3
SL
1145 /// drop(weak_foo); // Doesn't print anything
1146 /// drop(foo); // Prints "dropped!"
1147 ///
1148 /// assert!(other_weak_foo.upgrade().is_none());
1a4d82fc
JJ
1149 /// ```
1150 fn drop(&mut self) {
7cac9316 1151 let ptr = self.ptr.as_ptr();
1a4d82fc 1152
c34b1796
AL
1153 // If we find out that we were the last weak pointer, then its time to
1154 // deallocate the data entirely. See the discussion in Arc::drop() about
1155 // the memory orderings
c1a9b12d
SL
1156 //
1157 // It's not necessary to check for the locked state here, because the
1158 // weak count can only be locked if there was precisely one weak ref,
1159 // meaning that drop could only subsequently run ON that remaining weak
1160 // ref, which can only happen after the lock is released.
1a4d82fc
JJ
1161 if self.inner().weak.fetch_sub(1, Release) == 1 {
1162 atomic::fence(Acquire);
041b39d2
XL
1163 unsafe {
1164 Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
1165 }
1a4d82fc
JJ
1166 }
1167 }
1168}
1169
85aaf69f 1170#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1171impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
c30ab7b3 1172 /// Equality for two `Arc`s.
1a4d82fc 1173 ///
c30ab7b3 1174 /// Two `Arc`s are equal if their inner values are equal.
1a4d82fc
JJ
1175 ///
1176 /// # Examples
1177 ///
1178 /// ```
1179 /// use std::sync::Arc;
1180 ///
85aaf69f 1181 /// let five = Arc::new(5);
1a4d82fc 1182 ///
c30ab7b3 1183 /// assert!(five == Arc::new(5));
1a4d82fc 1184 /// ```
b039eaaf
SL
1185 fn eq(&self, other: &Arc<T>) -> bool {
1186 *(*self) == *(*other)
1187 }
1a4d82fc 1188
c30ab7b3 1189 /// Inequality for two `Arc`s.
1a4d82fc 1190 ///
c30ab7b3 1191 /// Two `Arc`s are unequal if their inner values are unequal.
1a4d82fc
JJ
1192 ///
1193 /// # Examples
1194 ///
1195 /// ```
1196 /// use std::sync::Arc;
1197 ///
85aaf69f 1198 /// let five = Arc::new(5);
1a4d82fc 1199 ///
c30ab7b3 1200 /// assert!(five != Arc::new(6));
1a4d82fc 1201 /// ```
b039eaaf
SL
1202 fn ne(&self, other: &Arc<T>) -> bool {
1203 *(*self) != *(*other)
1204 }
1a4d82fc 1205}
85aaf69f 1206#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1207impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
c30ab7b3 1208 /// Partial comparison for two `Arc`s.
1a4d82fc
JJ
1209 ///
1210 /// The two are compared by calling `partial_cmp()` on their inner values.
1211 ///
1212 /// # Examples
1213 ///
1214 /// ```
1215 /// use std::sync::Arc;
c30ab7b3 1216 /// use std::cmp::Ordering;
1a4d82fc 1217 ///
85aaf69f 1218 /// let five = Arc::new(5);
1a4d82fc 1219 ///
c30ab7b3 1220 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
1a4d82fc
JJ
1221 /// ```
1222 fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
1223 (**self).partial_cmp(&**other)
1224 }
1225
c30ab7b3 1226 /// Less-than comparison for two `Arc`s.
1a4d82fc
JJ
1227 ///
1228 /// The two are compared by calling `<` on their inner values.
1229 ///
1230 /// # Examples
1231 ///
1232 /// ```
1233 /// use std::sync::Arc;
1234 ///
85aaf69f 1235 /// let five = Arc::new(5);
1a4d82fc 1236 ///
c30ab7b3 1237 /// assert!(five < Arc::new(6));
1a4d82fc 1238 /// ```
b039eaaf
SL
1239 fn lt(&self, other: &Arc<T>) -> bool {
1240 *(*self) < *(*other)
1241 }
1a4d82fc 1242
c30ab7b3 1243 /// 'Less than or equal to' comparison for two `Arc`s.
1a4d82fc
JJ
1244 ///
1245 /// The two are compared by calling `<=` on their inner values.
1246 ///
1247 /// # Examples
1248 ///
1249 /// ```
1250 /// use std::sync::Arc;
1251 ///
85aaf69f 1252 /// let five = Arc::new(5);
1a4d82fc 1253 ///
c30ab7b3 1254 /// assert!(five <= Arc::new(5));
1a4d82fc 1255 /// ```
b039eaaf
SL
1256 fn le(&self, other: &Arc<T>) -> bool {
1257 *(*self) <= *(*other)
1258 }
1a4d82fc 1259
c30ab7b3 1260 /// Greater-than comparison for two `Arc`s.
1a4d82fc
JJ
1261 ///
1262 /// The two are compared by calling `>` on their inner values.
1263 ///
1264 /// # Examples
1265 ///
1266 /// ```
1267 /// use std::sync::Arc;
1268 ///
85aaf69f 1269 /// let five = Arc::new(5);
1a4d82fc 1270 ///
c30ab7b3 1271 /// assert!(five > Arc::new(4));
1a4d82fc 1272 /// ```
b039eaaf
SL
1273 fn gt(&self, other: &Arc<T>) -> bool {
1274 *(*self) > *(*other)
1275 }
1a4d82fc 1276
c30ab7b3 1277 /// 'Greater than or equal to' comparison for two `Arc`s.
1a4d82fc
JJ
1278 ///
1279 /// The two are compared by calling `>=` on their inner values.
1280 ///
1281 /// # Examples
1282 ///
1283 /// ```
1284 /// use std::sync::Arc;
1285 ///
85aaf69f 1286 /// let five = Arc::new(5);
1a4d82fc 1287 ///
c30ab7b3 1288 /// assert!(five >= Arc::new(5));
1a4d82fc 1289 /// ```
b039eaaf
SL
1290 fn ge(&self, other: &Arc<T>) -> bool {
1291 *(*self) >= *(*other)
1292 }
1a4d82fc 1293}
85aaf69f 1294#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1295impl<T: ?Sized + Ord> Ord for Arc<T> {
c30ab7b3
SL
1296 /// Comparison for two `Arc`s.
1297 ///
1298 /// The two are compared by calling `cmp()` on their inner values.
1299 ///
1300 /// # Examples
1301 ///
1302 /// ```
1303 /// use std::sync::Arc;
1304 /// use std::cmp::Ordering;
1305 ///
1306 /// let five = Arc::new(5);
1307 ///
1308 /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
1309 /// ```
b039eaaf
SL
1310 fn cmp(&self, other: &Arc<T>) -> Ordering {
1311 (**self).cmp(&**other)
1312 }
1a4d82fc 1313}
85aaf69f 1314#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1315impl<T: ?Sized + Eq> Eq for Arc<T> {}
1a4d82fc 1316
85aaf69f 1317#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1318impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
1a4d82fc 1319 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
85aaf69f 1320 fmt::Display::fmt(&**self, f)
1a4d82fc
JJ
1321 }
1322}
1323
85aaf69f 1324#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1325impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
1a4d82fc 1326 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
85aaf69f 1327 fmt::Debug::fmt(&**self, f)
1a4d82fc
JJ
1328 }
1329}
1330
9346a6ac 1331#[stable(feature = "rust1", since = "1.0.0")]
7453a54e 1332impl<T: ?Sized> fmt::Pointer for Arc<T> {
9346a6ac 1333 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
ff7c6d11 1334 fmt::Pointer::fmt(&(&**self as *const T), f)
9346a6ac
AL
1335 }
1336}
1337
85aaf69f 1338#[stable(feature = "rust1", since = "1.0.0")]
d9579d0f 1339impl<T: Default> Default for Arc<T> {
c30ab7b3
SL
1340 /// Creates a new `Arc<T>`, with the `Default` value for `T`.
1341 ///
1342 /// # Examples
1343 ///
1344 /// ```
1345 /// use std::sync::Arc;
1346 ///
1347 /// let x: Arc<i32> = Default::default();
1348 /// assert_eq!(*x, 0);
1349 /// ```
b039eaaf
SL
1350 fn default() -> Arc<T> {
1351 Arc::new(Default::default())
1352 }
1a4d82fc
JJ
1353}
1354
85aaf69f 1355#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1356impl<T: ?Sized + Hash> Hash for Arc<T> {
85aaf69f
SL
1357 fn hash<H: Hasher>(&self, state: &mut H) {
1358 (**self).hash(state)
1359 }
1360}
1a4d82fc 1361
92a42be0
SL
1362#[stable(feature = "from_for_ptrs", since = "1.6.0")]
1363impl<T> From<T> for Arc<T> {
1364 fn from(t: T) -> Self {
1365 Arc::new(t)
1366 }
1367}
1368
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<'a, T: Clone> From<&'a [T]> for Arc<[T]> {
    /// Allocates a new `Arc<[T]>` by cloning each element of `v`.
    #[inline]
    fn from(v: &[T]) -> Arc<[T]> {
        // Delegates to the internal `ArcFromSlice` helper, which clones
        // element-by-element with panic-safe cleanup.
        <Self as ArcFromSlice<T>>::from_slice(v)
    }
}
1376
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<'a> From<&'a str> for Arc<str> {
    /// Allocates a new `Arc<str>` containing a copy of `v`.
    #[inline]
    fn from(v: &str) -> Arc<str> {
        // Build the allocation as Arc<[u8]>, then re-tag the fat pointer as
        // `str`. The cast is sound because the bytes came from a valid &str
        // and the two unsized layouts are identical.
        let arc = Arc::<[u8]>::from(v.as_bytes());
        unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
    }
}
1385
1386#[stable(feature = "shared_from_slice", since = "1.21.0")]
1387impl From<String> for Arc<str> {
1388 #[inline]
1389 fn from(v: String) -> Arc<str> {
1390 Arc::from(&v[..])
1391 }
1392}
1393
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T: ?Sized> From<Box<T>> for Arc<T> {
    /// Moves the boxed value into a new `Arc` allocation (works for unsized
    /// `T` as well, e.g. `Box<str>` and boxed slices/trait objects).
    #[inline]
    fn from(v: Box<T>) -> Arc<T> {
        // Delegates to the internal `from_box` helper, which copies the
        // payload into an ArcInner and frees the original box shell.
        Arc::from_box(v)
    }
}
1401
#[stable(feature = "shared_from_slice", since = "1.21.0")]
impl<T> From<Vec<T>> for Arc<[T]> {
    /// Takes ownership of the vector's elements, moving them into a new
    /// `Arc<[T]>` allocation.
    #[inline]
    fn from(mut v: Vec<T>) -> Arc<[T]> {
        unsafe {
            // Bitwise-copy the elements; ownership transfers to the Arc.
            let arc = Arc::copy_from_slice(&v);

            // Allow the Vec to free its memory, but not destroy its contents
            // (they now live inside — and will be dropped by — the Arc).
            v.set_len(0);

            arc
        }
    }
}
1416
1a4d82fc 1417#[cfg(test)]
1a4d82fc 1418mod tests {
3b2f2976 1419 use std::boxed::Box;
1a4d82fc
JJ
1420 use std::clone::Clone;
1421 use std::sync::mpsc::channel;
1422 use std::mem::drop;
1423 use std::ops::Drop;
1424 use std::option::Option;
3157f602 1425 use std::option::Option::{None, Some};
1a4d82fc
JJ
1426 use std::sync::atomic;
1427 use std::sync::atomic::Ordering::{Acquire, SeqCst};
85aaf69f 1428 use std::thread;
1a4d82fc 1429 use std::sync::Mutex;
92a42be0 1430 use std::convert::From;
1a4d82fc 1431
041b39d2
XL
1432 use super::{Arc, Weak};
1433 use vec::Vec;
1434
85aaf69f 1435 struct Canary(*mut atomic::AtomicUsize);
1a4d82fc 1436
92a42be0 1437 impl Drop for Canary {
1a4d82fc
JJ
1438 fn drop(&mut self) {
1439 unsafe {
1440 match *self {
1441 Canary(c) => {
1442 (*c).fetch_add(1, SeqCst);
1443 }
1444 }
1445 }
1446 }
1447 }
1448
1449 #[test]
c30ab7b3 1450 #[cfg_attr(target_os = "emscripten", ignore)]
1a4d82fc 1451 fn manually_share_arc() {
92a42be0 1452 let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
1a4d82fc
JJ
1453 let arc_v = Arc::new(v);
1454
1455 let (tx, rx) = channel();
1456
85aaf69f
SL
1457 let _t = thread::spawn(move || {
1458 let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
1a4d82fc
JJ
1459 assert_eq!((*arc_v)[3], 4);
1460 });
1461
1462 tx.send(arc_v.clone()).unwrap();
1463
1464 assert_eq!((*arc_v)[2], 3);
1465 assert_eq!((*arc_v)[4], 5);
1466 }
1467
c34b1796 1468 #[test]
9346a6ac 1469 fn test_arc_get_mut() {
e9174d1e
SL
1470 let mut x = Arc::new(3);
1471 *Arc::get_mut(&mut x).unwrap() = 4;
1472 assert_eq!(*x, 4);
1473 let y = x.clone();
1474 assert!(Arc::get_mut(&mut x).is_none());
1475 drop(y);
1476 assert!(Arc::get_mut(&mut x).is_some());
1477 let _w = Arc::downgrade(&x);
1478 assert!(Arc::get_mut(&mut x).is_none());
c34b1796
AL
1479 }
1480
1a4d82fc 1481 #[test]
e9174d1e
SL
1482 fn try_unwrap() {
1483 let x = Arc::new(3);
1484 assert_eq!(Arc::try_unwrap(x), Ok(3));
1485 let x = Arc::new(4);
1486 let _y = x.clone();
1487 assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
1488 let x = Arc::new(5);
1489 let _w = Arc::downgrade(&x);
1490 assert_eq!(Arc::try_unwrap(x), Ok(5));
1491 }
1492
476ff2be
SL
1493 #[test]
1494 fn into_from_raw() {
1495 let x = Arc::new(box "hello");
1496 let y = x.clone();
1497
1498 let x_ptr = Arc::into_raw(x);
1499 drop(y);
1500 unsafe {
1501 assert_eq!(**x_ptr, "hello");
1502
1503 let x = Arc::from_raw(x_ptr);
1504 assert_eq!(**x, "hello");
1505
1506 assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello"));
1507 }
1508 }
1509
ea8adc8c
XL
1510 #[test]
1511 fn test_into_from_raw_unsized() {
1512 use std::fmt::Display;
1513 use std::string::ToString;
1514
1515 let arc: Arc<str> = Arc::from("foo");
1516
1517 let ptr = Arc::into_raw(arc.clone());
1518 let arc2 = unsafe { Arc::from_raw(ptr) };
1519
1520 assert_eq!(unsafe { &*ptr }, "foo");
1521 assert_eq!(arc, arc2);
1522
1523 let arc: Arc<Display> = Arc::new(123);
1524
1525 let ptr = Arc::into_raw(arc.clone());
1526 let arc2 = unsafe { Arc::from_raw(ptr) };
1527
1528 assert_eq!(unsafe { &*ptr }.to_string(), "123");
1529 assert_eq!(arc2.to_string(), "123");
1530 }
1531
e9174d1e
SL
1532 #[test]
1533 fn test_cowarc_clone_make_mut() {
1534 let mut cow0 = Arc::new(75);
1535 let mut cow1 = cow0.clone();
1536 let mut cow2 = cow1.clone();
1537
1538 assert!(75 == *Arc::make_mut(&mut cow0));
1539 assert!(75 == *Arc::make_mut(&mut cow1));
1540 assert!(75 == *Arc::make_mut(&mut cow2));
1541
1542 *Arc::make_mut(&mut cow0) += 1;
1543 *Arc::make_mut(&mut cow1) += 2;
1544 *Arc::make_mut(&mut cow2) += 3;
1545
1546 assert!(76 == *cow0);
1547 assert!(77 == *cow1);
1548 assert!(78 == *cow2);
1549
1550 // none should point to the same backing memory
1551 assert!(*cow0 != *cow1);
1552 assert!(*cow0 != *cow2);
1553 assert!(*cow1 != *cow2);
1a4d82fc
JJ
1554 }
1555
1556 #[test]
1557 fn test_cowarc_clone_unique2() {
85aaf69f 1558 let mut cow0 = Arc::new(75);
1a4d82fc
JJ
1559 let cow1 = cow0.clone();
1560 let cow2 = cow1.clone();
1561
1562 assert!(75 == *cow0);
1563 assert!(75 == *cow1);
1564 assert!(75 == *cow2);
1565
e9174d1e 1566 *Arc::make_mut(&mut cow0) += 1;
1a4d82fc
JJ
1567 assert!(76 == *cow0);
1568 assert!(75 == *cow1);
1569 assert!(75 == *cow2);
1570
1571 // cow1 and cow2 should share the same contents
1572 // cow0 should have a unique reference
1573 assert!(*cow0 != *cow1);
1574 assert!(*cow0 != *cow2);
1575 assert!(*cow1 == *cow2);
1576 }
1577
1578 #[test]
1579 fn test_cowarc_clone_weak() {
85aaf69f 1580 let mut cow0 = Arc::new(75);
e9174d1e 1581 let cow1_weak = Arc::downgrade(&cow0);
1a4d82fc
JJ
1582
1583 assert!(75 == *cow0);
1584 assert!(75 == *cow1_weak.upgrade().unwrap());
1585
e9174d1e 1586 *Arc::make_mut(&mut cow0) += 1;
1a4d82fc
JJ
1587
1588 assert!(76 == *cow0);
1589 assert!(cow1_weak.upgrade().is_none());
1590 }
1591
1592 #[test]
1593 fn test_live() {
85aaf69f 1594 let x = Arc::new(5);
e9174d1e 1595 let y = Arc::downgrade(&x);
1a4d82fc
JJ
1596 assert!(y.upgrade().is_some());
1597 }
1598
1599 #[test]
1600 fn test_dead() {
85aaf69f 1601 let x = Arc::new(5);
e9174d1e 1602 let y = Arc::downgrade(&x);
1a4d82fc
JJ
1603 drop(x);
1604 assert!(y.upgrade().is_none());
1605 }
1606
1607 #[test]
1608 fn weak_self_cyclic() {
1609 struct Cycle {
b039eaaf 1610 x: Mutex<Option<Weak<Cycle>>>,
1a4d82fc
JJ
1611 }
1612
1613 let a = Arc::new(Cycle { x: Mutex::new(None) });
e9174d1e 1614 let b = Arc::downgrade(&a.clone());
1a4d82fc
JJ
1615 *a.x.lock().unwrap() = Some(b);
1616
1617 // hopefully we don't double-free (or leak)...
1618 }
1619
1620 #[test]
1621 fn drop_arc() {
85aaf69f
SL
1622 let mut canary = atomic::AtomicUsize::new(0);
1623 let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
1a4d82fc
JJ
1624 drop(x);
1625 assert!(canary.load(Acquire) == 1);
1626 }
1627
1628 #[test]
1629 fn drop_arc_weak() {
85aaf69f
SL
1630 let mut canary = atomic::AtomicUsize::new(0);
1631 let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
e9174d1e 1632 let arc_weak = Arc::downgrade(&arc);
1a4d82fc
JJ
1633 assert!(canary.load(Acquire) == 0);
1634 drop(arc);
1635 assert!(canary.load(Acquire) == 1);
1636 drop(arc_weak);
1637 }
1638
1639 #[test]
1640 fn test_strong_count() {
54a0048b 1641 let a = Arc::new(0);
e9174d1e
SL
1642 assert!(Arc::strong_count(&a) == 1);
1643 let w = Arc::downgrade(&a);
1644 assert!(Arc::strong_count(&a) == 1);
1a4d82fc 1645 let b = w.upgrade().expect("");
e9174d1e
SL
1646 assert!(Arc::strong_count(&b) == 2);
1647 assert!(Arc::strong_count(&a) == 2);
1a4d82fc
JJ
1648 drop(w);
1649 drop(a);
e9174d1e 1650 assert!(Arc::strong_count(&b) == 1);
1a4d82fc 1651 let c = b.clone();
e9174d1e
SL
1652 assert!(Arc::strong_count(&b) == 2);
1653 assert!(Arc::strong_count(&c) == 2);
1a4d82fc
JJ
1654 }
1655
1656 #[test]
1657 fn test_weak_count() {
54a0048b 1658 let a = Arc::new(0);
e9174d1e
SL
1659 assert!(Arc::strong_count(&a) == 1);
1660 assert!(Arc::weak_count(&a) == 0);
1661 let w = Arc::downgrade(&a);
1662 assert!(Arc::strong_count(&a) == 1);
1663 assert!(Arc::weak_count(&a) == 1);
1a4d82fc 1664 let x = w.clone();
e9174d1e 1665 assert!(Arc::weak_count(&a) == 2);
1a4d82fc
JJ
1666 drop(w);
1667 drop(x);
e9174d1e
SL
1668 assert!(Arc::strong_count(&a) == 1);
1669 assert!(Arc::weak_count(&a) == 0);
1a4d82fc 1670 let c = a.clone();
e9174d1e
SL
1671 assert!(Arc::strong_count(&a) == 2);
1672 assert!(Arc::weak_count(&a) == 0);
1673 let d = Arc::downgrade(&c);
1674 assert!(Arc::weak_count(&c) == 1);
1675 assert!(Arc::strong_count(&c) == 2);
1a4d82fc
JJ
1676
1677 drop(a);
1678 drop(c);
1679 drop(d);
1680 }
1681
1682 #[test]
1683 fn show_arc() {
54a0048b 1684 let a = Arc::new(5);
85aaf69f 1685 assert_eq!(format!("{:?}", a), "5");
1a4d82fc
JJ
1686 }
1687
1688 // Make sure deriving works with Arc<T>
85aaf69f 1689 #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
b039eaaf
SL
1690 struct Foo {
1691 inner: Arc<i32>,
1692 }
62682a34
SL
1693
1694 #[test]
1695 fn test_unsized() {
1696 let x: Arc<[i32]> = Arc::new([1, 2, 3]);
1697 assert_eq!(format!("{:?}", x), "[1, 2, 3]");
e9174d1e 1698 let y = Arc::downgrade(&x.clone());
62682a34
SL
1699 drop(x);
1700 assert!(y.upgrade().is_none());
1701 }
92a42be0
SL
1702
1703 #[test]
1704 fn test_from_owned() {
1705 let foo = 123;
1706 let foo_arc = Arc::from(foo);
1707 assert!(123 == *foo_arc);
1708 }
9cc50fc6
SL
1709
1710 #[test]
1711 fn test_new_weak() {
1712 let foo: Weak<usize> = Weak::new();
1713 assert!(foo.upgrade().is_none());
1714 }
9e0c209e
SL
1715
1716 #[test]
1717 fn test_ptr_eq() {
1718 let five = Arc::new(5);
1719 let same_five = five.clone();
1720 let other_five = Arc::new(5);
1721
1722 assert!(Arc::ptr_eq(&five, &same_five));
1723 assert!(!Arc::ptr_eq(&five, &other_five));
1724 }
3b2f2976
XL
1725
1726 #[test]
1727 #[cfg_attr(target_os = "emscripten", ignore)]
1728 fn test_weak_count_locked() {
1729 let mut a = Arc::new(atomic::AtomicBool::new(false));
1730 let a2 = a.clone();
1731 let t = thread::spawn(move || {
1732 for _i in 0..1000000 {
1733 Arc::get_mut(&mut a);
1734 }
1735 a.store(true, SeqCst);
1736 });
1737
1738 while !a2.load(SeqCst) {
1739 let n = Arc::weak_count(&a2);
1740 assert!(n < 2, "bad weak count: {}", n);
1741 }
1742 t.join().unwrap();
1743 }
1744
1745 #[test]
1746 fn test_from_str() {
1747 let r: Arc<str> = Arc::from("foo");
1748
1749 assert_eq!(&r[..], "foo");
1750 }
1751
1752 #[test]
1753 fn test_copy_from_slice() {
1754 let s: &[u32] = &[1, 2, 3];
1755 let r: Arc<[u32]> = Arc::from(s);
1756
1757 assert_eq!(&r[..], [1, 2, 3]);
1758 }
1759
1760 #[test]
1761 fn test_clone_from_slice() {
1762 #[derive(Clone, Debug, Eq, PartialEq)]
1763 struct X(u32);
1764
1765 let s: &[X] = &[X(1), X(2), X(3)];
1766 let r: Arc<[X]> = Arc::from(s);
1767
1768 assert_eq!(&r[..], s);
1769 }
1770
1771 #[test]
1772 #[should_panic]
1773 fn test_clone_from_slice_panic() {
1774 use std::string::{String, ToString};
1775
1776 struct Fail(u32, String);
1777
1778 impl Clone for Fail {
1779 fn clone(&self) -> Fail {
1780 if self.0 == 2 {
1781 panic!();
1782 }
1783 Fail(self.0, self.1.clone())
1784 }
1785 }
1786
1787 let s: &[Fail] = &[
1788 Fail(0, "foo".to_string()),
1789 Fail(1, "bar".to_string()),
1790 Fail(2, "baz".to_string()),
1791 ];
1792
1793 // Should panic, but not cause memory corruption
1794 let _r: Arc<[Fail]> = Arc::from(s);
1795 }
1796
1797 #[test]
1798 fn test_from_box() {
1799 let b: Box<u32> = box 123;
1800 let r: Arc<u32> = Arc::from(b);
1801
1802 assert_eq!(*r, 123);
1803 }
1804
1805 #[test]
1806 fn test_from_box_str() {
1807 use std::string::String;
1808
1809 let s = String::from("foo").into_boxed_str();
1810 let r: Arc<str> = Arc::from(s);
1811
1812 assert_eq!(&r[..], "foo");
1813 }
1814
1815 #[test]
1816 fn test_from_box_slice() {
1817 let s = vec![1, 2, 3].into_boxed_slice();
1818 let r: Arc<[u32]> = Arc::from(s);
1819
1820 assert_eq!(&r[..], [1, 2, 3]);
1821 }
1822
1823 #[test]
1824 fn test_from_box_trait() {
1825 use std::fmt::Display;
1826 use std::string::ToString;
1827
1828 let b: Box<Display> = box 123;
1829 let r: Arc<Display> = Arc::from(b);
1830
1831 assert_eq!(r.to_string(), "123");
1832 }
1833
1834 #[test]
1835 fn test_from_box_trait_zero_sized() {
1836 use std::fmt::Debug;
1837
1838 let b: Box<Debug> = box ();
1839 let r: Arc<Debug> = Arc::from(b);
1840
1841 assert_eq!(format!("{:?}", r), "()");
1842 }
1843
1844 #[test]
1845 fn test_from_vec() {
1846 let v = vec![1, 2, 3];
1847 let r: Arc<[u32]> = Arc::from(v);
1848
1849 assert_eq!(&r[..], [1, 2, 3]);
1850 }
1a4d82fc 1851}
e9174d1e 1852
92a42be0 1853#[stable(feature = "rust1", since = "1.0.0")]
e9174d1e 1854impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
b039eaaf
SL
1855 fn borrow(&self) -> &T {
1856 &**self
1857 }
1858}
1859
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized> AsRef<T> for Arc<T> {
    /// Returns a reference to the inner value.
    fn as_ref(&self) -> &T {
        &**self
    }
}