// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![stable(feature = "rust1", since = "1.0.0")]

//! Threadsafe reference-counted boxes (the `Arc<T>` type).
//!
//! The `Arc<T>` type provides shared ownership of an immutable value.
//! Destruction is deterministic, and will occur as soon as the last owner is
//! gone. It is marked as `Send` because it uses atomic reference counting.
//!
//! If you do not need thread-safety, and just need shared ownership, consider
//! the [`Rc<T>` type](../rc/struct.Rc.html). It is the same as `Arc<T>`, but
//! does not use atomics, making it both thread-unsafe and significantly
//! faster when updating the reference count.
//!
//! The `downgrade` method can be used to create a non-owning `Weak<T>` pointer
//! to the box. A `Weak<T>` pointer can be upgraded to an `Arc<T>` pointer, but
//! will return `None` if the value has already been dropped.
//!
//! For example, a tree with parent pointers can be represented by putting the
//! nodes behind strong `Arc<T>` pointers, and then storing the parent pointers
//! as `Weak<T>` pointers.
//!
//! # Examples
//!
//! Sharing some immutable data between threads:
//!
//! ```no_run
//! use std::sync::Arc;
//! use std::thread;
//!
//! let five = Arc::new(5);
//!
//! for _ in 0..10 {
//!     let five = five.clone();
//!
//!     thread::spawn(move || {
//!         println!("{:?}", five);
//!     });
//! }
//! ```
//!
//! Sharing mutable data safely between threads with a `Mutex`:
//!
//! ```no_run
//! use std::sync::{Arc, Mutex};
//! use std::thread;
//!
//! let five = Arc::new(Mutex::new(5));
//!
//! for _ in 0..10 {
//!     let five = five.clone();
//!
//!     thread::spawn(move || {
//!         let mut number = five.lock().unwrap();
//!
//!         *number += 1;
//!
//!         println!("{}", *number); // prints a value between 6 and 15, depending on scheduling
//!     });
//! }
//! ```
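//!
//! A parent pointer stored as a `Weak<T>`: a minimal sketch of the tree
//! pattern mentioned above (the `Node` type here is illustrative, not part
//! of this module):
//!
//! ```
//! use std::sync::{Arc, Weak, Mutex};
//!
//! struct Node {
//!     parent: Mutex<Option<Weak<Node>>>,
//!     value: i32,
//! }
//!
//! let parent = Arc::new(Node { parent: Mutex::new(None), value: 1 });
//! let child = Arc::new(Node {
//!     parent: Mutex::new(Some(Arc::downgrade(&parent))),
//!     value: 2,
//! });
//!
//! // The child can reach its parent without keeping it alive forever.
//! let upgraded = child.parent.lock().unwrap().as_ref().and_then(|w| w.upgrade());
//! assert_eq!(upgraded.unwrap().value, 1);
//! ```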

use boxed::Box;

use core::sync::atomic;
use core::sync::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::borrow;
use core::fmt;
use core::cmp::Ordering;
use core::mem::{align_of_val, size_of_val};
use core::intrinsics::abort;
use core::mem;
use core::mem::uninitialized;
use core::ops::Deref;
use core::ops::CoerceUnsized;
use core::ptr::{self, Shared};
use core::marker::Unsize;
use core::hash::{Hash, Hasher};
use core::{usize, isize};
use core::convert::From;
use heap::deallocate;

const MAX_REFCOUNT: usize = (isize::MAX) as usize;

/// An atomically reference counted wrapper for shared state.
///
/// # Examples
///
/// In this example, a large vector is shared between several threads.
/// With simple pipes, without `Arc`, a copy would have to be made for each
/// thread.
///
/// When you clone an `Arc<T>`, it will create another pointer to the data and
/// increase the reference counter.
///
/// ```
/// use std::sync::Arc;
/// use std::thread;
///
/// fn main() {
///     let numbers: Vec<_> = (0..100).collect();
///     let shared_numbers = Arc::new(numbers);
///
///     for _ in 0..10 {
///         let child_numbers = shared_numbers.clone();
///
///         thread::spawn(move || {
///             let local_numbers = &child_numbers[..];
///
///             // Work with the local numbers
///         });
///     }
/// }
/// ```
#[unsafe_no_drop_flag]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Arc<T: ?Sized> {
    ptr: Shared<ArcInner<T>>,
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}

/// A weak pointer to an `Arc`.
///
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be
/// used to break cycles between `Arc` pointers.
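///
/// # Examples
///
/// A minimal sketch of upgrading and invalidation:
///
/// ```
/// use std::sync::{Arc, Weak};
///
/// let strong = Arc::new(5);
/// let weak: Weak<i32> = Arc::downgrade(&strong);
///
/// // While a strong pointer lives, the weak one can be upgraded.
/// assert_eq!(*weak.upgrade().unwrap(), 5);
///
/// // Once the last strong pointer is gone, upgrading yields `None`.
/// drop(strong);
/// assert!(weak.upgrade().is_none());
/// ```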
#[unsafe_no_drop_flag]
#[stable(feature = "arc_weak", since = "1.4.0")]
pub struct Weak<T: ?Sized> {
    ptr: Shared<ArcInner<T>>,
}

#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
#[stable(feature = "arc_weak", since = "1.4.0")]
unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}

#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "(Weak)")
    }
}

struct ArcInner<T: ?Sized> {
    strong: atomic::AtomicUsize,

    // the value usize::MAX acts as a sentinel for temporarily "locking" the
    // ability to upgrade weak pointers or downgrade strong ones; this is used
    // to avoid races in `make_mut` and `get_mut`.
    weak: atomic::AtomicUsize,

    data: T,
}

unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}

impl<T> Arc<T> {
    /// Constructs a new `Arc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// ```
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(data: T) -> Arc<T> {
        // Start the weak pointer count as 1, which is the weak pointer held
        // collectively by all the strong pointers; see std/rc.rs for more info.
        let x: Box<_> = box ArcInner {
            strong: atomic::AtomicUsize::new(1),
            weak: atomic::AtomicUsize::new(1),
            data: data,
        };
        Arc { ptr: unsafe { Shared::new(Box::into_raw(x)) } }
    }

    /// Unwraps the contained value if the `Arc<T>` has exactly one strong reference.
    ///
    /// Otherwise, an `Err` is returned with the same `Arc<T>`.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x = Arc::new(3);
    /// assert_eq!(Arc::try_unwrap(x), Ok(3));
    ///
    /// let x = Arc::new(4);
    /// let _y = x.clone();
    /// assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        // See `drop` for why all these atomics are like this
        if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
            return Err(this);
        }

        atomic::fence(Acquire);

        unsafe {
            let ptr = *this.ptr;
            let elem = ptr::read(&(*ptr).data);

            // Make a weak pointer to clean up the implicit strong-weak reference
            let _weak = Weak { ptr: this.ptr };
            mem::forget(this);

            Ok(elem)
        }
    }
}

impl<T: ?Sized> Arc<T> {
    /// Downgrades the `Arc<T>` to a `Weak<T>` reference.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let weak_five = Arc::downgrade(&five);
    /// ```
    #[stable(feature = "arc_weak", since = "1.4.0")]
    pub fn downgrade(this: &Self) -> Weak<T> {
        // This Relaxed is OK because we're checking the value in the CAS
        // below.
        let mut cur = this.inner().weak.load(Relaxed);

        loop {
            // check if the weak counter is currently "locked"; if so, spin.
            if cur == usize::MAX {
                cur = this.inner().weak.load(Relaxed);
                continue;
            }

            // NOTE: this code currently ignores the possibility of overflow
            // into usize::MAX; in general both Rc and Arc need to be adjusted
            // to deal with overflow.

            // Unlike with Clone(), we need this to be an Acquire read to
            // synchronize with the write coming from `is_unique`, so that the
            // events prior to that write happen before this read.
            match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
                Ok(_) => return Weak { ptr: this.ptr },
                Err(old) => cur = old,
            }
        }
    }

    /// Get the number of weak references to this value.
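    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the unstable `arc_counts` feature); note
    /// that the count is racy and may be stale as soon as it is read.
    ///
    /// ```
    /// #![feature(arc_counts)]
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _weak_five = Arc::downgrade(&five);
    /// assert_eq!(1, Arc::weak_count(&five));
    /// ```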
    #[inline]
    #[unstable(feature = "arc_counts", reason = "not clearly useful, and racy",
               issue = "28356")]
    pub fn weak_count(this: &Self) -> usize {
        this.inner().weak.load(SeqCst) - 1
    }

    /// Get the number of strong references to this value.
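    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the unstable `arc_counts` feature); the
    /// same raciness caveat applies.
    ///
    /// ```
    /// #![feature(arc_counts)]
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let _also_five = five.clone();
    /// assert_eq!(2, Arc::strong_count(&five));
    /// ```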
    #[inline]
    #[unstable(feature = "arc_counts", reason = "not clearly useful, and racy",
               issue = "28356")]
    pub fn strong_count(this: &Self) -> usize {
        this.inner().strong.load(SeqCst)
    }

    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // This unsafety is ok because while this arc is alive we're guaranteed
        // that the inner pointer is valid. Furthermore, we know that the
        // `ArcInner` structure itself is `Sync` because the inner data is
        // `Sync` as well, so we're ok loaning out an immutable pointer to these
        // contents.
        unsafe { &**self.ptr }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        let ptr = *self.ptr;

        // Destroy the data at this time, even though we may not free the box
        // allocation itself (there may still be weak pointers lying around).
        ptr::drop_in_place(&mut (*ptr).data);

        if self.inner().weak.fetch_sub(1, Release) == 1 {
            atomic::fence(Acquire);
            deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Clone for Arc<T> {
    /// Makes a clone of the `Arc<T>`.
    ///
    /// This increases the strong reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five.clone();
    /// ```
    #[inline]
    fn clone(&self) -> Arc<T> {
        // Using a relaxed ordering is alright here, as knowledge of the
        // original reference prevents other threads from erroneously deleting
        // the object.
        //
        // As explained in the [Boost documentation][1], increasing the
        // reference counter can always be done with memory_order_relaxed: New
        // references to an object can only be formed from an existing
        // reference, and passing an existing reference from one thread to
        // another must already provide any required synchronization.
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        let old_size = self.inner().strong.fetch_add(1, Relaxed);

        // However we need to guard against massive refcounts in case someone
        // is `mem::forget`ing Arcs. If we don't do this the count can overflow
        // and users will use-after-free. We racily saturate to `isize::MAX` on
        // the assumption that there aren't ~2 billion threads incrementing
        // the reference count at once. This branch will never be taken in
        // any realistic program.
        //
        // We abort because such a program is incredibly degenerate, and we
        // don't care to support it.
        if old_size > MAX_REFCOUNT {
            unsafe {
                abort();
            }
        }

        Arc { ptr: self.ptr }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &T {
        &self.inner().data
    }
}

impl<T: Clone> Arc<T> {
    /// Make a mutable reference into the given `Arc<T>`.
    /// If the `Arc<T>` has more than one strong reference, or any weak
    /// references, the inner data is cloned.
    ///
    /// This is also referred to as copy-on-write.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut data = Arc::new(5);
    ///
    /// *Arc::make_mut(&mut data) += 1;        // Won't clone anything
    /// let mut other_data = data.clone();     // Won't clone inner data
    /// *Arc::make_mut(&mut data) += 1;        // Clones inner data
    /// *Arc::make_mut(&mut data) += 1;        // Won't clone anything
    /// *Arc::make_mut(&mut other_data) *= 2;  // Won't clone anything
    ///
    /// // Note: data and other_data now point to different numbers
    /// assert_eq!(*data, 8);
    /// assert_eq!(*other_data, 12);
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn make_mut(this: &mut Self) -> &mut T {
        // Note that we hold both a strong reference and a weak reference.
        // Thus, releasing our strong reference only will not, by itself, cause
        // the memory to be deallocated.
        //
        // Use Acquire to ensure that we see any writes to `weak` that happen
        // before release writes (i.e., decrements) to `strong`. Since we hold a
        // weak count, there's no chance the ArcInner itself could be
        // deallocated.
        if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
            // Another strong pointer exists; clone
            *this = Arc::new((**this).clone());
        } else if this.inner().weak.load(Relaxed) != 1 {
            // Relaxed suffices in the above because this is fundamentally an
            // optimization: we are always racing with weak pointers being
            // dropped. Worst case, we end up allocating a new Arc unnecessarily.

            // We removed the last strong ref, but there are additional weak
            // refs remaining. We'll move the contents to a new Arc, and
            // invalidate the other weak refs.

            // Note that it is not possible for the read of `weak` to yield
            // usize::MAX (i.e., locked), since the weak count can only be
            // locked by a thread with a strong reference.

            // Materialize our own implicit weak pointer, so that it can clean
            // up the ArcInner as needed.
            let weak = Weak { ptr: this.ptr };

            // mark the data itself as already deallocated
            unsafe {
                // there is no data race in the implicit write caused by `read`
                // here (due to zeroing) because data is no longer accessed by
                // other threads (due to there being no more strong refs at this
                // point).
                let mut swap = Arc::new(ptr::read(&(**weak.ptr).data));
                mem::swap(this, &mut swap);
                mem::forget(swap);
            }
        } else {
            // We were the sole reference of either kind; bump back up the
            // strong ref count.
            this.inner().strong.store(1, Release);
        }

        // As with `get_mut()`, the unsafety is ok because our reference was
        // either unique to begin with, or became one upon cloning the contents.
        unsafe {
            let inner = &mut **this.ptr;
            &mut inner.data
        }
    }
}

impl<T: ?Sized> Arc<T> {
    /// Returns a mutable reference to the contained value if the `Arc<T>` has
    /// one strong reference and no weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let mut x = Arc::new(3);
    /// *Arc::get_mut(&mut x).unwrap() = 4;
    /// assert_eq!(*x, 4);
    ///
    /// let _y = x.clone();
    /// assert!(Arc::get_mut(&mut x).is_none());
    /// ```
    #[inline]
    #[stable(feature = "arc_unique", since = "1.4.0")]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if this.is_unique() {
            // This unsafety is ok because we're guaranteed that the pointer
            // returned is the *only* pointer that will ever be returned to T. Our
            // reference count is guaranteed to be 1 at this point, and we required
            // the Arc itself to be `mut`, so we're returning the only possible
            // reference to the inner data.
            unsafe {
                let inner = &mut **this.ptr;
                Some(&mut inner.data)
            }
        } else {
            None
        }
    }

    /// Determine whether this is the unique reference (including weak refs) to
    /// the underlying data.
    ///
    /// Note that this requires locking the weak ref count.
    fn is_unique(&mut self) -> bool {
        // lock the weak pointer count if we appear to be the sole weak pointer
        // holder.
        //
        // The acquire label here ensures a happens-before relationship with any
        // writes to `strong` prior to decrements of the `weak` count (via drop,
        // which uses Release).
        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
            // Due to the previous acquire read, this will observe any writes to
            // `strong` that were due to upgrading weak pointers; only strong
            // clones remain, which require that the strong count is > 1 anyway.
            let unique = self.inner().strong.load(Relaxed) == 1;

            // The release write here synchronizes with a read in `downgrade`,
            // effectively preventing the above read of `strong` from happening
            // after the write.
            self.inner().weak.store(1, Release); // release the lock
            unique
        } else {
            false
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Drop for Arc<T> {
    /// Drops the `Arc<T>`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count becomes zero and the only other references are `Weak<T>` ones,
    /// `drop`s the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// {
    ///     let five = Arc::new(5);
    ///
    ///     // stuff
    ///
    ///     drop(five); // explicit drop
    /// }
    /// {
    ///     let five = Arc::new(5);
    ///
    ///     // stuff
    ///
    /// } // implicit drop
    /// ```
    #[unsafe_destructor_blind_to_params]
    #[inline]
    fn drop(&mut self) {
        // This structure has #[unsafe_no_drop_flag], so this drop glue may run
        // more than once (but it is guaranteed to be zeroed after the first if
        // it's run more than once)
        let thin = *self.ptr as *const ();

        if thin as usize == mem::POST_DROP_USIZE {
            return;
        }

        // Because `fetch_sub` is already atomic, we do not need to synchronize
        // with other threads unless we are going to delete the object. This
        // same logic applies to the below `fetch_sub` to the `weak` count.
        if self.inner().strong.fetch_sub(1, Release) != 1 {
            return;
        }

        // This fence is needed to prevent reordering of use of the data and
        // deletion of the data. Because it is marked `Release`, the decreasing
        // of the reference count synchronizes with this `Acquire` fence. This
        // means that use of the data happens before decreasing the reference
        // count, which happens before this fence, which happens before the
        // deletion of the data.
        //
        // As explained in the [Boost documentation][1],
        //
        // > It is important to enforce any possible access to the object in one
        // > thread (through an existing reference) to *happen before* deleting
        // > the object in a different thread. This is achieved by a "release"
        // > operation after dropping a reference (any access to the object
        // > through this reference must obviously happened before), and an
        // > "acquire" operation before deleting the object.
        //
        // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
        atomic::fence(Acquire);

        unsafe {
            self.drop_slow();
        }
    }
}

impl<T: ?Sized> Weak<T> {
    /// Upgrades a weak reference to a strong reference.
    ///
    /// Upgrades the `Weak<T>` reference to an `Arc<T>`, if possible.
    ///
    /// Returns `None` if there were no strong references and the data was
    /// destroyed.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// let weak_five = Arc::downgrade(&five);
    ///
    /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
    /// ```
    #[stable(feature = "arc_weak", since = "1.4.0")]
    pub fn upgrade(&self) -> Option<Arc<T>> {
        // We use a CAS loop to increment the strong count instead of a
        // fetch_add because once the count hits 0 it must never be above 0.
        let inner = self.inner();

        // Relaxed load because any write of 0 that we can observe
        // leaves the field in a permanently zero state (so a
        // "stale" read of 0 is fine), and any other value is
        // confirmed via the CAS below.
        let mut n = inner.strong.load(Relaxed);

        loop {
            if n == 0 {
                return None;
            }

            // See comments in `Arc::clone` for why we do this (for `mem::forget`).
            if n > MAX_REFCOUNT {
                unsafe { abort(); }
            }

            // Relaxed is valid for the same reason it is on Arc's Clone impl
            match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
                Ok(_) => return Some(Arc { ptr: self.ptr }),
                Err(old) => n = old,
            }
        }
    }

    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // See comments above for why this is "safe"
        unsafe { &**self.ptr }
    }
}

#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Clone for Weak<T> {
    /// Makes a clone of the `Weak<T>`.
    ///
    /// This increases the weak reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let weak_five = Arc::downgrade(&Arc::new(5));
    ///
    /// weak_five.clone();
    /// ```
    #[inline]
    fn clone(&self) -> Weak<T> {
        // See comments in Arc::clone() for why this is relaxed. This can use a
        // fetch_add (ignoring the lock) because the weak count is only locked
        // when there are *no other* weak pointers in existence. (So we can't be
        // running this code in that case.)
        let old_size = self.inner().weak.fetch_add(1, Relaxed);

        // See comments in Arc::clone() for why we do this (for mem::forget).
        if old_size > MAX_REFCOUNT {
            unsafe {
                abort();
            }
        }

        Weak { ptr: self.ptr }
    }
}

#[stable(feature = "arc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
    /// Drops the `Weak<T>`.
    ///
    /// This will decrement the weak reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// {
    ///     let five = Arc::new(5);
    ///     let weak_five = Arc::downgrade(&five);
    ///
    ///     // stuff
    ///
    ///     drop(weak_five); // explicit drop
    /// }
    /// {
    ///     let five = Arc::new(5);
    ///     let weak_five = Arc::downgrade(&five);
    ///
    ///     // stuff
    ///
    /// } // implicit drop
    /// ```
    fn drop(&mut self) {
        let ptr = *self.ptr;
        let thin = ptr as *const ();

        // see comments above for why this check is here
        if thin as usize == mem::POST_DROP_USIZE {
            return;
        }

        // If we find out that we were the last weak pointer, then it's time to
        // deallocate the data entirely. See the discussion in Arc::drop() about
        // the memory orderings.
        //
        // It's not necessary to check for the locked state here, because the
        // weak count can only be locked if there was precisely one weak ref,
        // meaning that drop could only subsequently run on that remaining weak
        // ref, which can only happen after the lock is released.
        if self.inner().weak.fetch_sub(1, Release) == 1 {
            atomic::fence(Acquire);
            unsafe { deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
    /// Equality for two `Arc<T>`s.
    ///
    /// Two `Arc<T>`s are equal if their inner values are equal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five == Arc::new(5);
    /// ```
    fn eq(&self, other: &Arc<T>) -> bool {
        *(*self) == *(*other)
    }

    /// Inequality for two `Arc<T>`s.
    ///
    /// Two `Arc<T>`s are unequal if their inner values are unequal.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five != Arc::new(5);
    /// ```
    fn ne(&self, other: &Arc<T>) -> bool {
        *(*self) != *(*other)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
    /// Partial comparison for two `Arc<T>`s.
    ///
    /// The two are compared by calling `partial_cmp()` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five.partial_cmp(&Arc::new(5));
    /// ```
    fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
        (**self).partial_cmp(&**other)
    }

    /// Less-than comparison for two `Arc<T>`s.
    ///
    /// The two are compared by calling `<` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five < Arc::new(5);
    /// ```
    fn lt(&self, other: &Arc<T>) -> bool {
        *(*self) < *(*other)
    }

    /// 'Less-than or equal to' comparison for two `Arc<T>`s.
    ///
    /// The two are compared by calling `<=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five <= Arc::new(5);
    /// ```
    fn le(&self, other: &Arc<T>) -> bool {
        *(*self) <= *(*other)
    }

    /// Greater-than comparison for two `Arc<T>`s.
    ///
    /// The two are compared by calling `>` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five > Arc::new(5);
    /// ```
    fn gt(&self, other: &Arc<T>) -> bool {
        *(*self) > *(*other)
    }

    /// 'Greater-than or equal to' comparison for two `Arc<T>`s.
    ///
    /// The two are compared by calling `>=` on their inner values.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    ///
    /// five >= Arc::new(5);
    /// ```
    fn ge(&self, other: &Arc<T>) -> bool {
        *(*self) >= *(*other)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Arc<T> {
    fn cmp(&self, other: &Arc<T>) -> Ordering {
        (**self).cmp(&**other)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> Eq for Arc<T> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> fmt::Pointer for Arc<T> {
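    /// Formatting with `{:p}` prints the address of the shared allocation;
    /// a minimal sketch:
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let also_five = five.clone();
    ///
    /// // Both handles point at the same allocation.
    /// assert_eq!(format!("{:p}", five), format!("{:p}", also_five));
    /// ```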
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&*self.ptr, f)
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Arc<T> {
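    /// Creates an `Arc<T>` containing `T`'s default value; a minimal sketch:
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x: Arc<i32> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```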
    fn default() -> Arc<T> {
        Arc::new(Default::default())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Hash> Hash for Arc<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

#[stable(feature = "from_for_ptrs", since = "1.6.0")]
impl<T> From<T> for Arc<T> {
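    /// Moves the value into a fresh `Arc<T>`; a minimal sketch:
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let x: Arc<i32> = Arc::from(123);
    /// assert_eq!(*x, 123);
    /// ```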
    fn from(t: T) -> Self {
        Arc::new(t)
    }
}

impl<T> Weak<T> {
    /// Constructs a new `Weak<T>` without an accompanying instance of `T`.
    ///
    /// This allocates memory for `T`, but does not initialize it. Calling
    /// `Weak<T>::upgrade()` on the return value always gives `None`.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(downgraded_weak)]
    ///
    /// use std::sync::Weak;
    ///
    /// let empty: Weak<i64> = Weak::new();
    /// ```
    #[unstable(feature = "downgraded_weak",
               reason = "recently added",
               issue = "30425")]
    pub fn new() -> Weak<T> {
        unsafe {
            Weak {
                ptr: Shared::new(Box::into_raw(box ArcInner {
                    strong: atomic::AtomicUsize::new(0),
                    weak: atomic::AtomicUsize::new(1),
                    data: uninitialized(),
                })),
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use std::clone::Clone;
    use std::sync::mpsc::channel;
    use std::mem::drop;
    use std::ops::Drop;
    use std::option::Option;
    use std::option::Option::{Some, None};
    use std::sync::atomic;
    use std::sync::atomic::Ordering::{Acquire, SeqCst};
    use std::thread;
    use std::vec::Vec;
    use super::{Arc, Weak};
    use std::sync::Mutex;
    use std::convert::From;

    struct Canary(*mut atomic::AtomicUsize);

    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                match *self {
                    Canary(c) => {
                        (*c).fetch_add(1, SeqCst);
                    }
                }
            }
        }
    }

    #[test]
    fn manually_share_arc() {
        let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
        let arc_v = Arc::new(v);

        let (tx, rx) = channel();

        let _t = thread::spawn(move || {
            let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
            assert_eq!((*arc_v)[3], 4);
        });

        tx.send(arc_v.clone()).unwrap();

        assert_eq!((*arc_v)[2], 3);
        assert_eq!((*arc_v)[4], 5);
    }

    #[test]
    fn test_arc_get_mut() {
        let mut x = Arc::new(3);
        *Arc::get_mut(&mut x).unwrap() = 4;
        assert_eq!(*x, 4);
        let y = x.clone();
        assert!(Arc::get_mut(&mut x).is_none());
        drop(y);
        assert!(Arc::get_mut(&mut x).is_some());
        let _w = Arc::downgrade(&x);
        assert!(Arc::get_mut(&mut x).is_none());
    }

    #[test]
    fn try_unwrap() {
        let x = Arc::new(3);
        assert_eq!(Arc::try_unwrap(x), Ok(3));
        let x = Arc::new(4);
        let _y = x.clone();
        assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
        let x = Arc::new(5);
        let _w = Arc::downgrade(&x);
        assert_eq!(Arc::try_unwrap(x), Ok(5));
    }

    #[test]
    fn test_cowarc_clone_make_mut() {
        let mut cow0 = Arc::new(75);
        let mut cow1 = cow0.clone();
        let mut cow2 = cow1.clone();

        assert!(75 == *Arc::make_mut(&mut cow0));
        assert!(75 == *Arc::make_mut(&mut cow1));
        assert!(75 == *Arc::make_mut(&mut cow2));

        *Arc::make_mut(&mut cow0) += 1;
        *Arc::make_mut(&mut cow1) += 2;
        *Arc::make_mut(&mut cow2) += 3;

        assert!(76 == *cow0);
        assert!(77 == *cow1);
        assert!(78 == *cow2);

        // none should point to the same backing memory
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 != *cow2);
    }

    #[test]
    fn test_cowarc_clone_unique2() {
        let mut cow0 = Arc::new(75);
        let cow1 = cow0.clone();
        let cow2 = cow1.clone();

        assert!(75 == *cow0);
        assert!(75 == *cow1);
        assert!(75 == *cow2);

        *Arc::make_mut(&mut cow0) += 1;
        assert!(76 == *cow0);
        assert!(75 == *cow1);
        assert!(75 == *cow2);

        // cow1 and cow2 should share the same contents
        // cow0 should have a unique reference
        assert!(*cow0 != *cow1);
        assert!(*cow0 != *cow2);
        assert!(*cow1 == *cow2);
    }

    #[test]
    fn test_cowarc_clone_weak() {
        let mut cow0 = Arc::new(75);
        let cow1_weak = Arc::downgrade(&cow0);

        assert!(75 == *cow0);
        assert!(75 == *cow1_weak.upgrade().unwrap());

        *Arc::make_mut(&mut cow0) += 1;

        assert!(76 == *cow0);
        assert!(cow1_weak.upgrade().is_none());
    }

    #[test]
    fn test_live() {
        let x = Arc::new(5);
        let y = Arc::downgrade(&x);
        assert!(y.upgrade().is_some());
    }

    #[test]
    fn test_dead() {
        let x = Arc::new(5);
        let y = Arc::downgrade(&x);
        drop(x);
        assert!(y.upgrade().is_none());
    }

    #[test]
    fn weak_self_cyclic() {
        struct Cycle {
            x: Mutex<Option<Weak<Cycle>>>,
        }

        let a = Arc::new(Cycle { x: Mutex::new(None) });
        let b = Arc::downgrade(&a.clone());
        *a.x.lock().unwrap() = Some(b);

        // hopefully we don't double-free (or leak)...
    }

    #[test]
    fn drop_arc() {
        let mut canary = atomic::AtomicUsize::new(0);
        let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
        drop(x);
        assert!(canary.load(Acquire) == 1);
    }

    #[test]
    fn drop_arc_weak() {
        let mut canary = atomic::AtomicUsize::new(0);
        let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
        let arc_weak = Arc::downgrade(&arc);
        assert!(canary.load(Acquire) == 0);
        drop(arc);
        assert!(canary.load(Acquire) == 1);
        drop(arc_weak);
    }

    #[test]
    fn test_strong_count() {
        let a = Arc::new(0);
        assert!(Arc::strong_count(&a) == 1);
        let w = Arc::downgrade(&a);
        assert!(Arc::strong_count(&a) == 1);
        let b = w.upgrade().expect("");
        assert!(Arc::strong_count(&b) == 2);
        assert!(Arc::strong_count(&a) == 2);
        drop(w);
        drop(a);
        assert!(Arc::strong_count(&b) == 1);
        let c = b.clone();
        assert!(Arc::strong_count(&b) == 2);
        assert!(Arc::strong_count(&c) == 2);
    }

    #[test]
    fn test_weak_count() {
        let a = Arc::new(0);
        assert!(Arc::strong_count(&a) == 1);
        assert!(Arc::weak_count(&a) == 0);
        let w = Arc::downgrade(&a);
        assert!(Arc::strong_count(&a) == 1);
        assert!(Arc::weak_count(&a) == 1);
        let x = w.clone();
        assert!(Arc::weak_count(&a) == 2);
        drop(w);
        drop(x);
        assert!(Arc::strong_count(&a) == 1);
        assert!(Arc::weak_count(&a) == 0);
        let c = a.clone();
        assert!(Arc::strong_count(&a) == 2);
        assert!(Arc::weak_count(&a) == 0);
        let d = Arc::downgrade(&c);
        assert!(Arc::weak_count(&c) == 1);
        assert!(Arc::strong_count(&c) == 2);

        drop(a);
        drop(c);
        drop(d);
    }

    #[test]
    fn show_arc() {
        let a = Arc::new(5);
        assert_eq!(format!("{:?}", a), "5");
    }

    // Make sure deriving works with Arc<T>
    #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
    struct Foo {
        inner: Arc<i32>,
    }

    #[test]
    fn test_unsized() {
        let x: Arc<[i32]> = Arc::new([1, 2, 3]);
        assert_eq!(format!("{:?}", x), "[1, 2, 3]");
        let y = Arc::downgrade(&x.clone());
        drop(x);
        assert!(y.upgrade().is_none());
    }

    #[test]
    fn test_from_owned() {
        let foo = 123;
        let foo_arc = Arc::from(foo);
        assert!(123 == *foo_arc);
    }

    #[test]
    fn test_new_weak() {
        let foo: Weak<usize> = Weak::new();
        assert!(foo.upgrade().is_none());
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
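    /// Borrows the inner value; a minimal sketch:
    ///
    /// ```
    /// use std::borrow::Borrow;
    /// use std::sync::Arc;
    ///
    /// let five = Arc::new(5);
    /// let inner: &i32 = five.borrow();
    /// assert_eq!(*inner, 5);
    /// ```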
    fn borrow(&self) -> &T {
        &**self
    }
}

#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized> AsRef<T> for Arc<T> {
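    /// Converts to a shared reference of the inner value; a minimal sketch:
    ///
    /// ```
    /// use std::sync::Arc;
    ///
    /// let s = Arc::new("hello".to_string());
    /// let r: &String = s.as_ref();
    /// assert_eq!(r, "hello");
    /// ```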
    fn as_ref(&self) -> &T {
        &**self
    }
}