//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`][send]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
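//!
//! For example, the following is rejected at compile time because an `Rc` cannot
//! be sent to another thread (a minimal illustration; [`sync::Arc`][arc] would
//! work here instead):
//!
//! ```compile_fail
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // `Rc<i32>` is `!Send`, so the closure below is not `Send` and `spawn` rejects it.
//! thread::spawn(move || println!("{}", rc));
//! ```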
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
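//!
//! A small sketch of that behavior (dropping the last `Rc` makes `upgrade` return `None`):
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new("hello".to_string());
//! let weak = Rc::downgrade(&strong);
//! drop(strong); // the inner `String` is dropped here...
//! assert!(weak.upgrade().is_none()); // ...so upgrading the `Weak` now yields `None`.
//! ```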
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! let my_weak = Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of foo.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [send]: core::marker::Send
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

#[cfg(not(test))]
use crate::boxed::Box;
#[cfg(test)]
use std::boxed::Box;

use core::any::Any;
use core::borrow;
use core::cell::Cell;
use core::cmp::Ordering;
use core::convert::{From, TryFrom};
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{self, PhantomData, Unpin, Unsize};
#[cfg(not(no_global_oom_handling))]
use core::mem::size_of_val;
use core::mem::{self, align_of_val_raw, forget};
use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
use core::ptr::{self, NonNull};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
#[cfg(not(no_global_oom_handling))]
use crate::alloc::{box_free, WriteCloneIntoRaw};
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
use crate::vec::Vec;

#[cfg(test)]
mod tests;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct RcBox<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct Rc<T: ?Sized> {
    ptr: NonNull<RcBox<T>>,
    phantom: PhantomData<RcBox<T>>,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !marker::Send for Rc<T> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> !marker::Sync for Rc<T> {}

#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized> UnwindSafe for Rc<T> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized> RefUnwindSafe for Rc<T> {}

#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

impl<T: ?Sized> Rc<T> {
    #[inline(always)]
    fn inner(&self) -> &RcBox<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    unsafe fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
        Self { ptr, phantom: PhantomData }
    }

    unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Self::from_inner(
                Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` using a closure `data_fn` that has access to a
    /// weak reference to the constructing `Rc<T>`.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to prevent a memory leak.
    /// In `data_fn`, initialization of `T` can make use of the weak reference
    /// by cloning and storing it inside `T` for use at a later time.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside `data_fn` will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Construct a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         Rc::new_cyclic(|me| Gadget { me: me.clone() })
    ///     }
    ///
    ///     /// Return a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let uninit_ptr: NonNull<_> = Box::leak(box RcBox {
            strong: Cell::new(0),
            weak: Cell::new(1),
            value: mem::MaybeUninit::<T>::uninit(),
        })
        .into();

        let init_ptr: NonNull<RcBox<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(ptr::addr_of_mut!((*inner).value), data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            Rc::from_inner(init_ptr)
        };

        // Strong references should collectively own a shared weak reference,
        // so don't run the destructor for our old weak reference.
        mem::forget(weak);
        strong
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "new_uninit", issue = "63291")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcBox { strong: Cell::new(1), weak: Cell::new(1), value })?)
                    .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    // #[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api, new_uninit)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    //#[unstable(feature = "new_uninit", issue = "63291")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                |mem| mem as *mut RcBox<mem::MaybeUninit<T>>,
            )?))
        }
    }
    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
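    ///
    /// # Examples
    ///
    /// A minimal sketch; the pinned value can still be read through `Deref`:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let pinned = Rc::pin(5);
    /// assert_eq!(*pinned, 5);
    /// ```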
136023e0 616 #[cfg(not(no_global_oom_handling))]
0731742a 617 #[stable(feature = "pin", since = "1.33.0")]
c295e0f8 618 #[must_use]
0731742a 619 pub fn pin(value: T) -> Pin<Rc<T>> {
0bf4aa26
XL
620 unsafe { Pin::new_unchecked(Rc::new(value)) }
621 }
622
e74abb32 623 /// Returns the inner value, if the `Rc` has exactly one strong reference.
62682a34 624 ///
3dfed10e 625 /// Otherwise, an [`Err`] is returned with the same `Rc` that was
c30ab7b3 626 /// passed in.
62682a34 627 ///
54a0048b
SL
628 /// This will succeed even if there are outstanding weak references.
629 ///
62682a34
SL
630 /// # Examples
631 ///
632 /// ```
62682a34
SL
633 /// use std::rc::Rc;
634 ///
635 /// let x = Rc::new(3);
636 /// assert_eq!(Rc::try_unwrap(x), Ok(3));
637 ///
638 /// let x = Rc::new(4);
7cac9316 639 /// let _y = Rc::clone(&x);
9e0c209e 640 /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
62682a34
SL
641 /// ```
642 #[inline]
e9174d1e
SL
643 #[stable(feature = "rc_unique", since = "1.4.0")]
644 pub fn try_unwrap(this: Self) -> Result<T, Self> {
476ff2be 645 if Rc::strong_count(&this) == 1 {
62682a34 646 unsafe {
e9174d1e
SL
647 let val = ptr::read(&*this); // copy the contained object
648
ff7c6d11 649 // Indicate to Weaks that they can't be promoted by decrementing
e9174d1e
SL
650 // the strong count, and then remove the implicit "strong weak"
651 // pointer while also handling drop logic by just crafting a
652 // fake Weak.
1b1a35ee 653 this.inner().dec_strong();
54a0048b 654 let _weak = Weak { ptr: this.ptr };
e9174d1e 655 forget(this);
62682a34
SL
656 Ok(val)
657 }
658 } else {
e9174d1e 659 Err(this)
62682a34
SL
660 }
661 }
ea8adc8c 662}
e9174d1e 663
e1599b0c
XL
664impl<T> Rc<[T]> {
665 /// Constructs a new reference-counted slice with uninitialized contents.
666 ///
667 /// # Examples
668 ///
669 /// ```
670 /// #![feature(new_uninit)]
671 /// #![feature(get_mut_unchecked)]
672 ///
673 /// use std::rc::Rc;
674 ///
675 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
676 ///
5099ac24
FG
677 /// // Deferred initialization:
678 /// let data = Rc::get_mut(&mut values).unwrap();
679 /// data[0].write(1);
680 /// data[1].write(2);
681 /// data[2].write(3);
e1599b0c 682 ///
5099ac24 683 /// let values = unsafe { values.assume_init() };
e1599b0c
XL
684 ///
685 /// assert_eq!(*values, [1, 2, 3])
686 /// ```
17df50a5 687 #[cfg(not(no_global_oom_handling))]
e1599b0c 688 #[unstable(feature = "new_uninit", issue = "63291")]
c295e0f8 689 #[must_use]
e1599b0c 690 pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
dfeec247 691 unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
e1599b0c 692 }
3dfed10e
XL
693
694 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
695 /// filled with `0` bytes.
696 ///
697 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
698 /// incorrect usage of this method.
699 ///
700 /// # Examples
701 ///
702 /// ```
703 /// #![feature(new_uninit)]
704 ///
705 /// use std::rc::Rc;
706 ///
707 /// let values = Rc::<[u32]>::new_zeroed_slice(3);
708 /// let values = unsafe { values.assume_init() };
709 ///
710 /// assert_eq!(*values, [0, 0, 0])
711 /// ```
712 ///
1b1a35ee 713 /// [zeroed]: mem::MaybeUninit::zeroed
17df50a5 714 #[cfg(not(no_global_oom_handling))]
3dfed10e 715 #[unstable(feature = "new_uninit", issue = "63291")]
c295e0f8 716 #[must_use]
3dfed10e
XL
717 pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
718 unsafe {
719 Rc::from_ptr(Rc::allocate_for_layout(
720 Layout::array::<T>(len).unwrap(),
fc512014 721 |layout| Global.allocate_zeroed(layout),
3dfed10e
XL
722 |mem| {
723 ptr::slice_from_raw_parts_mut(mem as *mut T, len)
724 as *mut RcBox<[mem::MaybeUninit<T>]>
725 },
726 ))
727 }
728 }
e1599b0c
XL
729}
730
731impl<T> Rc<mem::MaybeUninit<T>> {
732 /// Converts to `Rc<T>`.
733 ///
734 /// # Safety
735 ///
736 /// As with [`MaybeUninit::assume_init`],
e74abb32 737 /// it is up to the caller to guarantee that the inner value
e1599b0c
XL
738 /// really is in an initialized state.
739 /// Calling this when the content is not yet fully initialized
740 /// causes immediate undefined behavior.
741 ///
1b1a35ee 742 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
e1599b0c
XL
743 ///
744 /// # Examples
745 ///
746 /// ```
747 /// #![feature(new_uninit)]
748 /// #![feature(get_mut_unchecked)]
749 ///
750 /// use std::rc::Rc;
751 ///
752 /// let mut five = Rc::<u32>::new_uninit();
753 ///
5099ac24
FG
754 /// // Deferred initialization:
755 /// Rc::get_mut(&mut five).unwrap().write(5);
e1599b0c 756 ///
5099ac24 757 /// let five = unsafe { five.assume_init() };
e1599b0c
XL
758 ///
759 /// assert_eq!(*five, 5)
760 /// ```
761 #[unstable(feature = "new_uninit", issue = "63291")]
762 #[inline]
763 pub unsafe fn assume_init(self) -> Rc<T> {
3c0e092e 764 unsafe { Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast()) }
e1599b0c
XL
765 }
766}
767
768impl<T> Rc<[mem::MaybeUninit<T>]> {
769 /// Converts to `Rc<[T]>`.
770 ///
771 /// # Safety
772 ///
773 /// As with [`MaybeUninit::assume_init`],
e74abb32 774 /// it is up to the caller to guarantee that the inner value
e1599b0c
XL
775 /// really is in an initialized state.
776 /// Calling this when the content is not yet fully initialized
777 /// causes immediate undefined behavior.
778 ///
1b1a35ee 779 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
e1599b0c
XL
780 ///
781 /// # Examples
782 ///
783 /// ```
784 /// #![feature(new_uninit)]
785 /// #![feature(get_mut_unchecked)]
786 ///
787 /// use std::rc::Rc;
788 ///
789 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
790 ///
5099ac24
FG
791 /// // Deferred initialization:
792 /// let data = Rc::get_mut(&mut values).unwrap();
793 /// data[0].write(1);
794 /// data[1].write(2);
795 /// data[2].write(3);
e1599b0c 796 ///
5099ac24 797 /// let values = unsafe { values.assume_init() };
e1599b0c
XL
798 ///
799 /// assert_eq!(*values, [1, 2, 3])
800 /// ```
801 #[unstable(feature = "new_uninit", issue = "63291")]
802 #[inline]
803 pub unsafe fn assume_init(self) -> Rc<[T]> {
f035d41b 804 unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
e1599b0c
XL
805 }
806}
807
ea8adc8c 808impl<T: ?Sized> Rc<T> {
476ff2be
SL
809 /// Consumes the `Rc`, returning the wrapped pointer.
810 ///
811 /// To avoid a memory leak the pointer must be converted back to an `Rc` using
c295e0f8 812 /// [`Rc::from_raw`].
9e0c209e
SL
813 ///
814 /// # Examples
815 ///
816 /// ```
9e0c209e
SL
817 /// use std::rc::Rc;
818 ///
dc9dc135 819 /// let x = Rc::new("hello".to_owned());
476ff2be 820 /// let x_ptr = Rc::into_raw(x);
dc9dc135 821 /// assert_eq!(unsafe { &*x_ptr }, "hello");
476ff2be 822 /// ```
8bb4bdeb
XL
823 #[stable(feature = "rc_raw", since = "1.17.0")]
824 pub fn into_raw(this: Self) -> *const T {
ba9703b0
XL
825 let ptr = Self::as_ptr(&this);
826 mem::forget(this);
827 ptr
828 }
829
830 /// Provides a raw pointer to the data.
831 ///
832 /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
833 /// for as long there are strong counts in the `Rc`.
834 ///
835 /// # Examples
836 ///
837 /// ```
ba9703b0
XL
838 /// use std::rc::Rc;
839 ///
840 /// let x = Rc::new("hello".to_owned());
841 /// let y = Rc::clone(&x);
842 /// let x_ptr = Rc::as_ptr(&x);
843 /// assert_eq!(x_ptr, Rc::as_ptr(&y));
844 /// assert_eq!(unsafe { &*x_ptr }, "hello");
845 /// ```
f9f354fc 846 #[stable(feature = "weak_into_raw", since = "1.45.0")]
ba9703b0 847 pub fn as_ptr(this: &Self) -> *const T {
dfeec247 848 let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
dfeec247 849
f035d41b
XL
850 // SAFETY: This cannot go through Deref::deref or Rc::inner because
851 // this is required to retain raw/mut provenance such that e.g. `get_mut` can
852 // write through the pointer after the Rc is recovered through `from_raw`.
5869c6ff 853 unsafe { ptr::addr_of_mut!((*ptr).value) }
476ff2be
SL
854 }
855
ba9703b0
XL
856 /// Constructs an `Rc<T>` from a raw pointer.
857 ///
858 /// The raw pointer must have been previously returned by a call to
859 /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
860 /// and alignment as `T`. This is trivially true if `U` is `T`.
861 /// Note that if `U` is not `T` but has the same size and alignment, this is
862 /// basically like transmuting references of different types. See
c295e0f8 863 /// [`mem::transmute`] for more information on what
ba9703b0 864 /// restrictions apply in this case.
476ff2be 865 ///
ba9703b0
XL
866 /// The user of `from_raw` has to make sure a specific value of `T` is only
867 /// dropped once.
476ff2be 868 ///
ba9703b0
XL
869 /// This function is unsafe because improper use may lead to memory unsafety,
870 /// even if the returned `Rc<T>` is never accessed.
476ff2be 871 ///
3dfed10e 872 /// [into_raw]: Rc::into_raw
476ff2be
SL
873 ///
874 /// # Examples
9e0c209e 875 ///
9e0c209e 876 /// ```
476ff2be
SL
877 /// use std::rc::Rc;
878 ///
dc9dc135 879 /// let x = Rc::new("hello".to_owned());
476ff2be
SL
880 /// let x_ptr = Rc::into_raw(x);
881 ///
882 /// unsafe {
883 /// // Convert back to an `Rc` to prevent leak.
884 /// let x = Rc::from_raw(x_ptr);
dc9dc135 885 /// assert_eq!(&*x, "hello");
476ff2be 886 ///
e1599b0c 887 /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
476ff2be
SL
888 /// }
889 ///
890 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
891 /// ```
8bb4bdeb
XL
892 #[stable(feature = "rc_raw", since = "1.17.0")]
893 pub unsafe fn from_raw(ptr: *const T) -> Self {
f035d41b 894 let offset = unsafe { data_offset(ptr) };
ea8adc8c
XL
895
896 // Reverse the offset to find the original RcBox.
5869c6ff 897 let rc_ptr =
ee023bcb 898 unsafe { (ptr as *mut u8).offset(-offset).with_metadata_of(ptr as *mut RcBox<T>) };
7cac9316 899
f035d41b 900 unsafe { Self::from_ptr(rc_ptr) }
cc61c64b 901 }
cc61c64b 902
3dfed10e 903 /// Creates a new [`Weak`] pointer to this allocation.
d9579d0f
AL
904 ///
905 /// # Examples
906 ///
907 /// ```
d9579d0f
AL
908 /// use std::rc::Rc;
909 ///
910 /// let five = Rc::new(5);
911 ///
e9174d1e 912 /// let weak_five = Rc::downgrade(&five);
d9579d0f 913 /// ```
3c0e092e
XL
914 #[must_use = "this returns a new `Weak` pointer, \
915 without modifying the original `Rc`"]
e9174d1e
SL
916 #[stable(feature = "rc_weak", since = "1.4.0")]
917 pub fn downgrade(this: &Self) -> Weak<T> {
1b1a35ee 918 this.inner().inc_weak();
8faf50e0 919 // Make sure we do not create a dangling Weak
5869c6ff 920 debug_assert!(!is_dangling(this.ptr.as_ptr()));
54a0048b 921 Weak { ptr: this.ptr }
d9579d0f 922 }
d9579d0f 923
3dfed10e 924 /// Gets the number of [`Weak`] pointers to this allocation.
9e0c209e
SL
925 ///
926 /// # Examples
927 ///
928 /// ```
9e0c209e
SL
929 /// use std::rc::Rc;
930 ///
931 /// let five = Rc::new(5);
932 /// let _weak_five = Rc::downgrade(&five);
933 ///
934 /// assert_eq!(1, Rc::weak_count(&five));
935 /// ```
62682a34 936 #[inline]
476ff2be 937 #[stable(feature = "rc_counts", since = "1.15.0")]
b039eaaf 938 pub fn weak_count(this: &Self) -> usize {
1b1a35ee 939 this.inner().weak() - 1
b039eaaf 940 }
62682a34 941
e74abb32 942 /// Gets the number of strong (`Rc`) pointers to this allocation.
9e0c209e
SL
943 ///
944 /// # Examples
945 ///
946 /// ```
9e0c209e
SL
947 /// use std::rc::Rc;
948 ///
949 /// let five = Rc::new(5);
7cac9316 950 /// let _also_five = Rc::clone(&five);
9e0c209e
SL
951 ///
952 /// assert_eq!(2, Rc::strong_count(&five));
953 /// ```
62682a34 954 #[inline]
476ff2be 955 #[stable(feature = "rc_counts", since = "1.15.0")]
b039eaaf 956 pub fn strong_count(this: &Self) -> usize {
1b1a35ee 957 this.inner().strong()
b039eaaf 958 }
62682a34 959
cdc7bbd5
XL
960 /// Increments the strong reference count on the `Rc<T>` associated with the
961 /// provided pointer by one.
962 ///
963 /// # Safety
964 ///
965 /// The pointer must have been obtained through `Rc::into_raw`, and the
966 /// associated `Rc` instance must be valid (i.e. the strong count must be at
967 /// least 1) for the duration of this method.
968 ///
969 /// # Examples
970 ///
971 /// ```
972 /// use std::rc::Rc;
973 ///
974 /// let five = Rc::new(5);
975 ///
976 /// unsafe {
977 /// let ptr = Rc::into_raw(five);
978 /// Rc::increment_strong_count(ptr);
979 ///
980 /// let five = Rc::from_raw(ptr);
981 /// assert_eq!(2, Rc::strong_count(&five));
982 /// }
983 /// ```
984 #[inline]
985 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
986 pub unsafe fn increment_strong_count(ptr: *const T) {
987 // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
988 let rc = unsafe { mem::ManuallyDrop::new(Rc::<T>::from_raw(ptr)) };
989 // Now increase refcount, but don't drop new refcount either
990 let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
991 }
992
993 /// Decrements the strong reference count on the `Rc<T>` associated with the
994 /// provided pointer by one.
995 ///
996 /// # Safety
997 ///
998 /// The pointer must have been obtained through `Rc::into_raw`, and the
999 /// associated `Rc` instance must be valid (i.e. the strong count must be at
1000 /// least 1) when invoking this method. This method can be used to release
1001 /// the final `Rc` and backing storage, but **should not** be called after
1002 /// the final `Rc` has been released.
1003 ///
1004 /// # Examples
1005 ///
1006 /// ```
1007 /// use std::rc::Rc;
1008 ///
1009 /// let five = Rc::new(5);
1010 ///
1011 /// unsafe {
1012 /// let ptr = Rc::into_raw(five);
1013 /// Rc::increment_strong_count(ptr);
1014 ///
1015 /// let five = Rc::from_raw(ptr);
1016 /// assert_eq!(2, Rc::strong_count(&five));
1017 /// Rc::decrement_strong_count(ptr);
1018 /// assert_eq!(1, Rc::strong_count(&five));
1019 /// }
1020 /// ```
1021 #[inline]
1022 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1023 pub unsafe fn decrement_strong_count(ptr: *const T) {
1024 unsafe { mem::drop(Rc::from_raw(ptr)) };
1025 }
1026
3dfed10e 1027 /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
e74abb32 1028 /// this allocation.
62682a34 1029 #[inline]
cc61c64b 1030 fn is_unique(this: &Self) -> bool {
e9174d1e 1031 Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
62682a34
SL
1032 }
1033
e74abb32 1034 /// Returns a mutable reference into the given `Rc`, if there are
3dfed10e 1035 /// no other `Rc` or [`Weak`] pointers to the same allocation.
62682a34 1036 ///
476ff2be 1037 /// Returns [`None`] otherwise, because it is not safe to
9e0c209e
SL
1038 /// mutate a shared value.
1039 ///
1040 /// See also [`make_mut`][make_mut], which will [`clone`][clone]
94222f64 1041 /// the inner value when there are other `Rc` pointers.
9e0c209e 1042 ///
3dfed10e
XL
1043 /// [make_mut]: Rc::make_mut
1044 /// [clone]: Clone::clone
62682a34
SL
1045 ///
1046 /// # Examples
1047 ///
1048 /// ```
62682a34
SL
1049 /// use std::rc::Rc;
1050 ///
1051 /// let mut x = Rc::new(3);
1052 /// *Rc::get_mut(&mut x).unwrap() = 4;
1053 /// assert_eq!(*x, 4);
1054 ///
7cac9316 1055 /// let _y = Rc::clone(&x);
62682a34
SL
1056 /// assert!(Rc::get_mut(&mut x).is_none());
1057 /// ```
1058 #[inline]
e9174d1e
SL
1059 #[stable(feature = "rc_unique", since = "1.4.0")]
1060 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
dfeec247 1061 if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1a4d82fc 1062 }
9e0c209e 1063
e74abb32 1064 /// Returns a mutable reference into the given `Rc`,
e1599b0c
XL
1065 /// without any check.
1066 ///
1067 /// See also [`get_mut`], which is safe and does appropriate checks.
1068 ///
3dfed10e 1069 /// [`get_mut`]: Rc::get_mut
e1599b0c
XL
1070 ///
1071 /// # Safety
1072 ///
e74abb32 1073 /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
e1599b0c
XL
1074 /// for the duration of the returned borrow.
1075 /// This is trivially the case if no such pointers exist,
1076 /// for example immediately after `Rc::new`.
1077 ///
1078 /// # Examples
1079 ///
1080 /// ```
1081 /// #![feature(get_mut_unchecked)]
1082 ///
1083 /// use std::rc::Rc;
1084 ///
1085 /// let mut x = Rc::new(String::new());
1086 /// unsafe {
1087 /// Rc::get_mut_unchecked(&mut x).push_str("foo")
1088 /// }
1089 /// assert_eq!(*x, "foo");
1090 /// ```
1091 #[inline]
1092 #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1093 pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1b1a35ee
XL
1094 // We are careful to *not* create a reference covering the "count" fields, as
1095 // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1096 unsafe { &mut (*this.ptr.as_ptr()).value }
e1599b0c
XL
1097 }
1098
9e0c209e 1099 #[inline]
8bb4bdeb 1100 #[stable(feature = "ptr_eq", since = "1.17.0")]
e74abb32
XL
1101 /// Returns `true` if the two `Rc`s point to the same allocation
1102 /// (in a vein similar to [`ptr::eq`]).
9e0c209e
SL
1103 ///
1104 /// # Examples
1105 ///
1106 /// ```
9e0c209e
SL
1107 /// use std::rc::Rc;
1108 ///
1109 /// let five = Rc::new(5);
7cac9316 1110 /// let same_five = Rc::clone(&five);
9e0c209e
SL
1111 /// let other_five = Rc::new(5);
1112 ///
1113 /// assert!(Rc::ptr_eq(&five, &same_five));
1114 /// assert!(!Rc::ptr_eq(&five, &other_five));
1115 /// ```
1116 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
7cac9316 1117 this.ptr.as_ptr() == other.ptr.as_ptr()
9e0c209e 1118 }
1a4d82fc
JJ
1119}
1120
1a4d82fc 1121impl<T: Clone> Rc<T> {
9e0c209e
SL
1122 /// Makes a mutable reference into the given `Rc`.
1123 ///
e74abb32
XL
1124 /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1125 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
dc9dc135 1126 /// referred to as clone-on-write.
9e0c209e 1127 ///
94222f64
XL
1128 /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1129 /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1130 /// be cloned.
1a4d82fc 1131 ///
94222f64
XL
1132 /// See also [`get_mut`], which will fail rather than cloning the inner value
1133 /// or diassociating [`Weak`] pointers.
dc9dc135 1134 ///
3dfed10e
XL
1135 /// [`clone`]: Clone::clone
1136 /// [`get_mut`]: Rc::get_mut
1a4d82fc
JJ
1137 ///
1138 /// # Examples
1139 ///
1140 /// ```
1141 /// use std::rc::Rc;
1142 ///
e9174d1e
SL
1143 /// let mut data = Rc::new(5);
1144 ///
94222f64
XL
1145 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1146 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1147 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
1148 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1149 /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
e9174d1e 1150 ///
e74abb32 1151 /// // Now `data` and `other_data` point to different allocations.
e9174d1e
SL
1152 /// assert_eq!(*data, 8);
1153 /// assert_eq!(*other_data, 12);
1a4d82fc 1154 /// ```
dc9dc135 1155 ///
e74abb32 1156 /// [`Weak`] pointers will be disassociated:
dc9dc135
XL
1157 ///
1158 /// ```
1159 /// use std::rc::Rc;
1160 ///
1161 /// let mut data = Rc::new(75);
1162 /// let weak = Rc::downgrade(&data);
1163 ///
1164 /// assert!(75 == *data);
1165 /// assert!(75 == *weak.upgrade().unwrap());
1166 ///
1167 /// *Rc::make_mut(&mut data) += 1;
1168 ///
1169 /// assert!(76 == *data);
1170 /// assert!(weak.upgrade().is_none());
1171 /// ```
17df50a5 1172 #[cfg(not(no_global_oom_handling))]
1a4d82fc 1173 #[inline]
e9174d1e
SL
1174 #[stable(feature = "rc_unique", since = "1.4.0")]
1175 pub fn make_mut(this: &mut Self) -> &mut T {
1176 if Rc::strong_count(this) != 1 {
5869c6ff
XL
1177 // Gotta clone the data, there are other Rcs.
1178 // Pre-allocate memory to allow writing the cloned value directly.
1179 let mut rc = Self::new_uninit();
1180 unsafe {
1181 let data = Rc::get_mut_unchecked(&mut rc);
1182 (**this).write_clone_into_raw(data.as_mut_ptr());
1183 *this = rc.assume_init();
1184 }
e9174d1e
SL
1185 } else if Rc::weak_count(this) != 0 {
1186 // Can just steal the data, all that's left is Weaks
5869c6ff 1187 let mut rc = Self::new_uninit();
e9174d1e 1188 unsafe {
5869c6ff
XL
1189 let data = Rc::get_mut_unchecked(&mut rc);
1190 data.as_mut_ptr().copy_from_nonoverlapping(&**this, 1);
1191
1192 this.inner().dec_strong();
e9174d1e
SL
1193 // Remove implicit strong-weak ref (no need to craft a fake
1194 // Weak here -- we know other Weaks can clean up for us)
5869c6ff
XL
1195 this.inner().dec_weak();
1196 ptr::write(this, rc.assume_init());
e9174d1e 1197 }
1a4d82fc 1198 }
c34b1796
AL
1199 // This unsafety is ok because we're guaranteed that the pointer
1200 // returned is the *only* pointer that will ever be returned to T. Our
1201 // reference count is guaranteed to be 1 at this point, and we required
1202 // the `Rc<T>` itself to be `mut`, so we're returning the only possible
e74abb32 1203 // reference to the allocation.
dfeec247 1204 unsafe { &mut this.ptr.as_mut().value }
1a4d82fc 1205 }
5099ac24
FG
1206
1207 /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
1208 /// clone.
1209 ///
1210 /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
1211 /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
1212 ///
1213 /// # Examples
1214 ///
1215 /// ```
1216 /// #![feature(arc_unwrap_or_clone)]
1217 /// # use std::{ptr, rc::Rc};
1218 /// let inner = String::from("test");
1219 /// let ptr = inner.as_ptr();
1220 ///
1221 /// let rc = Rc::new(inner);
1222 /// let inner = Rc::unwrap_or_clone(rc);
1223 /// // The inner value was not cloned
1224 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1225 ///
1226 /// let rc = Rc::new(inner);
1227 /// let rc2 = rc.clone();
1228 /// let inner = Rc::unwrap_or_clone(rc);
1229 /// // Because there were 2 references, we had to clone the inner value.
1230 /// assert!(!ptr::eq(ptr, inner.as_ptr()));
1231 /// // `rc2` is the last reference, so when we unwrap it we get back
1232 /// // the original `String`.
1233 /// let inner = Rc::unwrap_or_clone(rc2);
1234 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1235 /// ```
1236 #[inline]
1237 #[unstable(feature = "arc_unwrap_or_clone", issue = "93610")]
1238 pub fn unwrap_or_clone(this: Self) -> T {
1239 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1240 }
1a4d82fc
JJ
1241}
1242
8faf50e0 1243impl Rc<dyn Any> {
ea8adc8c 1244 #[inline]
8faf50e0 1245 #[stable(feature = "rc_downcast", since = "1.29.0")]
a1dfa0c6 1246 /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
ea8adc8c
XL
1247 ///
1248 /// # Examples
1249 ///
1250 /// ```
ea8adc8c
XL
1251 /// use std::any::Any;
1252 /// use std::rc::Rc;
1253 ///
a1dfa0c6 1254 /// fn print_if_string(value: Rc<dyn Any>) {
ea8adc8c
XL
1255 /// if let Ok(string) = value.downcast::<String>() {
1256 /// println!("String ({}): {}", string.len(), string);
1257 /// }
1258 /// }
1259 ///
e74abb32
XL
1260 /// let my_string = "Hello World".to_string();
1261 /// print_if_string(Rc::new(my_string));
1262 /// print_if_string(Rc::new(0i8));
ea8adc8c 1263 /// ```
8faf50e0 1264 pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
ea8adc8c 1265 if (*self).is::<T>() {
3c0e092e
XL
1266 unsafe {
1267 let ptr = self.ptr.cast::<RcBox<T>>();
1268 forget(self);
1269 Ok(Rc::from_inner(ptr))
1270 }
ea8adc8c
XL
1271 } else {
1272 Err(self)
1273 }
1274 }
1275}
1276
3b2f2976 1277impl<T: ?Sized> Rc<T> {
416331ca 1278 /// Allocates an `RcBox<T>` with sufficient space for
e74abb32 1279 /// a possibly-unsized inner value where the value has the layout provided.
416331ca
XL
1280 ///
1281 /// The function `mem_to_rcbox` is called with the data pointer
1282 /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
17df50a5 1283 #[cfg(not(no_global_oom_handling))]
e1599b0c 1284 unsafe fn allocate_for_layout(
416331ca 1285 value_layout: Layout,
1b1a35ee 1286 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
dfeec247 1287 mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
416331ca
XL
1288 ) -> *mut RcBox<T> {
1289 // Calculate layout using the given value layout.
a1dfa0c6
XL
1290 // Previously, layout was calculated on the expression
1291 // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
1292 // reference (see #54908).
dfeec247 1293 let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
5869c6ff
XL
1294 unsafe {
1295 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rcbox)
1296 .unwrap_or_else(|_| handle_alloc_error(layout))
1297 }
1298 }
1299
1300 /// Allocates an `RcBox<T>` with sufficient space for
1301 /// a possibly-unsized inner value where the value has the layout provided,
1302 /// returning an error if allocation fails.
1303 ///
1304 /// The function `mem_to_rcbox` is called with the data pointer
1305 /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
1306 #[inline]
1307 unsafe fn try_allocate_for_layout(
1308 value_layout: Layout,
1309 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
1310 mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
1311 ) -> Result<*mut RcBox<T>, AllocError> {
1312 // Calculate layout using the given value layout.
1313 // Previously, layout was calculated on the expression
1314 // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
1315 // reference (see #54908).
1316 let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
3b2f2976 1317
416331ca 1318 // Allocate for the layout.
5869c6ff 1319 let ptr = allocate(layout)?;
3b2f2976 1320
a1dfa0c6 1321 // Initialize the RcBox
3dfed10e 1322 let inner = mem_to_rcbox(ptr.as_non_null_ptr().as_ptr());
f035d41b
XL
1323 unsafe {
1324 debug_assert_eq!(Layout::for_value(&*inner), layout);
3b2f2976 1325
f035d41b
XL
1326 ptr::write(&mut (*inner).strong, Cell::new(1));
1327 ptr::write(&mut (*inner).weak, Cell::new(1));
1328 }
3b2f2976 1329
5869c6ff 1330 Ok(inner)
3b2f2976
XL
1331 }
1332
e74abb32 1333 /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
17df50a5 1334 #[cfg(not(no_global_oom_handling))]
416331ca
XL
1335 unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
1336 // Allocate for the `RcBox<T>` using the given value.
f035d41b 1337 unsafe {
3dfed10e
XL
1338 Self::allocate_for_layout(
1339 Layout::for_value(&*ptr),
fc512014 1340 |layout| Global.allocate(layout),
ee023bcb 1341 |mem| mem.with_metadata_of(ptr as *mut RcBox<T>),
3dfed10e 1342 )
f035d41b 1343 }
416331ca
XL
1344 }
1345
17df50a5 1346 #[cfg(not(no_global_oom_handling))]
3b2f2976
XL
1347 fn from_box(v: Box<T>) -> Rc<T> {
1348 unsafe {
29967ef6 1349 let (box_unique, alloc) = Box::into_unique(v);
83c7162d 1350 let bptr = box_unique.as_ptr();
3b2f2976
XL
1351
1352 let value_size = size_of_val(&*bptr);
1353 let ptr = Self::allocate_for_ptr(bptr);
1354
1355 // Copy value as bytes
1356 ptr::copy_nonoverlapping(
1357 bptr as *const T as *const u8,
1358 &mut (*ptr).value as *mut _ as *mut u8,
dfeec247
XL
1359 value_size,
1360 );
3b2f2976
XL
1361
1362 // Free the allocation without dropping its contents
29967ef6 1363 box_free(box_unique, alloc);
3b2f2976 1364
416331ca 1365 Self::from_ptr(ptr)
3b2f2976
XL
1366 }
1367 }
1368}
1369
416331ca
XL
1370impl<T> Rc<[T]> {
1371 /// Allocates an `RcBox<[T]>` with the given length.
17df50a5 1372 #[cfg(not(no_global_oom_handling))]
416331ca 1373 unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
f035d41b 1374 unsafe {
3dfed10e
XL
1375 Self::allocate_for_layout(
1376 Layout::array::<T>(len).unwrap(),
fc512014 1377 |layout| Global.allocate(layout),
3dfed10e
XL
1378 |mem| ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>,
1379 )
f035d41b 1380 }
416331ca 1381 }
3b2f2976 1382
f9f354fc 1383 /// Copy elements from slice into newly allocated Rc<\[T\]>
416331ca
XL
1384 ///
1385 /// Unsafe because the caller must either take ownership or bind `T: Copy`
17df50a5 1386 #[cfg(not(no_global_oom_handling))]
3b2f2976 1387 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
f035d41b
XL
1388 unsafe {
1389 let ptr = Self::allocate_for_slice(v.len());
1390 ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
1391 Self::from_ptr(ptr)
1392 }
3b2f2976 1393 }
3b2f2976 1394
416331ca
XL
1395 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
1396 ///
1397 /// Behavior is undefined should the size be wrong.
17df50a5 1398 #[cfg(not(no_global_oom_handling))]
416331ca 1399 unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Rc<[T]> {
3b2f2976
XL
1400 // Panic guard while cloning T elements.
1401 // In the event of a panic, elements that have been written
1402 // into the new RcBox will be dropped, then the memory freed.
1403 struct Guard<T> {
94b46f34 1404 mem: NonNull<u8>,
3b2f2976
XL
1405 elems: *mut T,
1406 layout: Layout,
1407 n_elems: usize,
1408 }
1409
1410 impl<T> Drop for Guard<T> {
1411 fn drop(&mut self) {
3b2f2976
XL
1412 unsafe {
1413 let slice = from_raw_parts_mut(self.elems, self.n_elems);
1414 ptr::drop_in_place(slice);
1415
fc512014 1416 Global.deallocate(self.mem, self.layout);
3b2f2976
XL
1417 }
1418 }
1419 }
1420
f035d41b
XL
1421 unsafe {
1422 let ptr = Self::allocate_for_slice(len);
3b2f2976 1423
f035d41b
XL
1424 let mem = ptr as *mut _ as *mut u8;
1425 let layout = Layout::for_value(&*ptr);
3b2f2976 1426
f035d41b
XL
1427 // Pointer to first element
1428 let elems = &mut (*ptr).value as *mut [T] as *mut T;
3b2f2976 1429
f035d41b 1430 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
3b2f2976 1431
f035d41b
XL
1432 for (i, item) in iter.enumerate() {
1433 ptr::write(elems.add(i), item);
1434 guard.n_elems += 1;
1435 }
416331ca 1436
f035d41b
XL
1437 // All clear. Forget the guard so it doesn't free the new RcBox.
1438 forget(guard);
416331ca 1439
f035d41b
XL
1440 Self::from_ptr(ptr)
1441 }
416331ca
XL
1442 }
1443}
3b2f2976 1444
416331ca
XL
1445/// Specialization trait used for `From<&[T]>`.
1446trait RcFromSlice<T> {
1447 fn from_slice(slice: &[T]) -> Self;
1448}
3b2f2976 1449
17df50a5 1450#[cfg(not(no_global_oom_handling))]
416331ca
XL
1451impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
1452 #[inline]
1453 default fn from_slice(v: &[T]) -> Self {
dfeec247 1454 unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
3b2f2976
XL
1455 }
1456}
1457
17df50a5 1458#[cfg(not(no_global_oom_handling))]
3b2f2976
XL
1459impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
1460 #[inline]
1461 fn from_slice(v: &[T]) -> Self {
1462 unsafe { Rc::copy_from_slice(v) }
1463 }
1464}
1465
d9579d0f
AL
1466#[stable(feature = "rust1", since = "1.0.0")]
1467impl<T: ?Sized> Deref for Rc<T> {
1468 type Target = T;
1469
1470 #[inline(always)]
1471 fn deref(&self) -> &T {
1472 &self.inner().value
1473 }
1474}
1a4d82fc 1475
dfeec247 1476#[unstable(feature = "receiver_trait", issue = "none")]
0731742a
XL
1477impl<T: ?Sized> Receiver for Rc<T> {}
1478
d9579d0f 1479#[stable(feature = "rust1", since = "1.0.0")]
32a655c1 1480unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
9e0c209e 1481 /// Drops the `Rc`.
d9579d0f
AL
1482 ///
1483 /// This will decrement the strong reference count. If the strong reference
c30ab7b3 1484 /// count reaches zero then the only other references (if any) are
b7449926 1485 /// [`Weak`], so we `drop` the inner value.
d9579d0f
AL
1486 ///
1487 /// # Examples
1488 ///
1489 /// ```
d9579d0f
AL
1490 /// use std::rc::Rc;
1491 ///
9e0c209e 1492 /// struct Foo;
d9579d0f 1493 ///
9e0c209e
SL
1494 /// impl Drop for Foo {
1495 /// fn drop(&mut self) {
1496 /// println!("dropped!");
1497 /// }
d9579d0f 1498 /// }
d9579d0f 1499 ///
9e0c209e 1500 /// let foo = Rc::new(Foo);
7cac9316 1501 /// let foo2 = Rc::clone(&foo);
d9579d0f 1502 ///
9e0c209e
SL
1503 /// drop(foo); // Doesn't print anything
1504 /// drop(foo2); // Prints "dropped!"
d9579d0f
AL
1505 /// ```
1506 fn drop(&mut self) {
1507 unsafe {
1b1a35ee
XL
1508 self.inner().dec_strong();
1509 if self.inner().strong() == 0 {
9e0c209e 1510 // destroy the contained object
1b1a35ee 1511 ptr::drop_in_place(Self::get_mut_unchecked(self));
d9579d0f 1512
9e0c209e
SL
1513 // remove the implicit "strong weak" pointer now that we've
1514 // destroyed the contents.
1b1a35ee 1515 self.inner().dec_weak();
d9579d0f 1516
1b1a35ee 1517 if self.inner().weak() == 0 {
fc512014 1518 Global.deallocate(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
d9579d0f
AL
1519 }
1520 }
1521 }
1522 }
1523}
1524
d9579d0f
AL
1525#[stable(feature = "rust1", since = "1.0.0")]
1526impl<T: ?Sized> Clone for Rc<T> {
9e0c209e 1527 /// Makes a clone of the `Rc` pointer.
d9579d0f 1528 ///
e74abb32 1529 /// This creates another pointer to the same allocation, increasing the
9e0c209e 1530 /// strong reference count.
d9579d0f
AL
1531 ///
1532 /// # Examples
1533 ///
1534 /// ```
d9579d0f
AL
1535 /// use std::rc::Rc;
1536 ///
1537 /// let five = Rc::new(5);
1538 ///
0bf4aa26 1539 /// let _ = Rc::clone(&five);
d9579d0f
AL
1540 /// ```
1541 #[inline]
1542 fn clone(&self) -> Rc<T> {
3c0e092e
XL
1543 unsafe {
1544 self.inner().inc_strong();
1545 Self::from_inner(self.ptr)
1546 }
d9579d0f
AL
1547 }
1548}
1a4d82fc 1549
136023e0 1550#[cfg(not(no_global_oom_handling))]
85aaf69f 1551#[stable(feature = "rust1", since = "1.0.0")]
1a4d82fc
JJ
1552impl<T: Default> Default for Rc<T> {
1553 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
1554 ///
1555 /// # Examples
1556 ///
1557 /// ```
1558 /// use std::rc::Rc;
1a4d82fc 1559 ///
85aaf69f 1560 /// let x: Rc<i32> = Default::default();
9e0c209e 1561 /// assert_eq!(*x, 0);
1a4d82fc
JJ
1562 /// ```
1563 #[inline]
1a4d82fc
JJ
1564 fn default() -> Rc<T> {
1565 Rc::new(Default::default())
1566 }
1567}
1568
0731742a
XL
1569#[stable(feature = "rust1", since = "1.0.0")]
1570trait RcEqIdent<T: ?Sized + PartialEq> {
1571 fn eq(&self, other: &Rc<T>) -> bool;
1572 fn ne(&self, other: &Rc<T>) -> bool;
1573}
1574
1575#[stable(feature = "rust1", since = "1.0.0")]
1576impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
1577 #[inline]
1578 default fn eq(&self, other: &Rc<T>) -> bool {
1579 **self == **other
1580 }
1581
1582 #[inline]
1583 default fn ne(&self, other: &Rc<T>) -> bool {
1584 **self != **other
1585 }
1586}
1587
f9f354fc
XL
1588// Hack to allow specializing on `Eq` even though `Eq` has a method.
1589#[rustc_unsafe_specialization_marker]
1590pub(crate) trait MarkerEq: PartialEq<Self> {}
1591
1592impl<T: Eq> MarkerEq for T {}
1593
48663c56
XL
1594/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
1595/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
1596/// store large values that are slow to clone, but also heavy to check for equality, causing this
1597/// cost to pay off more easily. It's also more likely to have two `Rc` clones that point to
1598/// the same value than two `&T`s.
e74abb32
XL
1599///
1600/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
0731742a 1601#[stable(feature = "rust1", since = "1.0.0")]
f9f354fc 1602impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> {
0731742a
XL
1603 #[inline]
1604 fn eq(&self, other: &Rc<T>) -> bool {
1605 Rc::ptr_eq(self, other) || **self == **other
1606 }
1607
1608 #[inline]
1609 fn ne(&self, other: &Rc<T>) -> bool {
1610 !Rc::ptr_eq(self, other) && **self != **other
1611 }
1612}
1613
85aaf69f 1614#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1615impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
9e0c209e 1616 /// Equality for two `Rc`s.
1a4d82fc 1617 ///
e74abb32
XL
1618 /// Two `Rc`s are equal if their inner values are equal, even if they are
1619 /// stored in different allocations.
1a4d82fc 1620 ///
e74abb32
XL
1621 /// If `T` also implements `Eq` (implying reflexivity of equality),
1622 /// two `Rc`s that point to the same allocation are
0731742a
XL
1623 /// always equal.
1624 ///
1a4d82fc
JJ
1625 /// # Examples
1626 ///
1627 /// ```
1628 /// use std::rc::Rc;
1629 ///
85aaf69f 1630 /// let five = Rc::new(5);
1a4d82fc 1631 ///
9e0c209e 1632 /// assert!(five == Rc::new(5));
1a4d82fc 1633 /// ```
0731742a 1634 #[inline]
b039eaaf 1635 fn eq(&self, other: &Rc<T>) -> bool {
0731742a 1636 RcEqIdent::eq(self, other)
b039eaaf 1637 }
1a4d82fc 1638
9e0c209e 1639 /// Inequality for two `Rc`s.
1a4d82fc 1640 ///
9e0c209e 1641 /// Two `Rc`s are unequal if their inner values are unequal.
1a4d82fc 1642 ///
e74abb32
XL
1643 /// If `T` also implements `Eq` (implying reflexivity of equality),
1644 /// two `Rc`s that point to the same allocation are
0731742a
XL
1645 /// never unequal.
1646 ///
1a4d82fc
JJ
1647 /// # Examples
1648 ///
1649 /// ```
1650 /// use std::rc::Rc;
1651 ///
85aaf69f 1652 /// let five = Rc::new(5);
1a4d82fc 1653 ///
9e0c209e 1654 /// assert!(five != Rc::new(6));
1a4d82fc 1655 /// ```
0731742a 1656 #[inline]
b039eaaf 1657 fn ne(&self, other: &Rc<T>) -> bool {
0731742a 1658 RcEqIdent::ne(self, other)
b039eaaf 1659 }
1a4d82fc
JJ
1660}
1661
85aaf69f 1662#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1663impl<T: ?Sized + Eq> Eq for Rc<T> {}
1a4d82fc 1664
85aaf69f 1665#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1666impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
9e0c209e 1667 /// Partial comparison for two `Rc`s.
1a4d82fc
JJ
1668 ///
1669 /// The two are compared by calling `partial_cmp()` on their inner values.
1670 ///
1671 /// # Examples
1672 ///
1673 /// ```
1674 /// use std::rc::Rc;
9e0c209e 1675 /// use std::cmp::Ordering;
1a4d82fc 1676 ///
85aaf69f 1677 /// let five = Rc::new(5);
1a4d82fc 1678 ///
9e0c209e 1679 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
1a4d82fc
JJ
1680 /// ```
1681 #[inline(always)]
1682 fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
1683 (**self).partial_cmp(&**other)
1684 }
1685
9e0c209e 1686 /// Less-than comparison for two `Rc`s.
1a4d82fc
JJ
1687 ///
1688 /// The two are compared by calling `<` on their inner values.
1689 ///
1690 /// # Examples
1691 ///
1692 /// ```
1693 /// use std::rc::Rc;
1694 ///
85aaf69f 1695 /// let five = Rc::new(5);
1a4d82fc 1696 ///
9e0c209e 1697 /// assert!(five < Rc::new(6));
1a4d82fc
JJ
1698 /// ```
1699 #[inline(always)]
b039eaaf
SL
1700 fn lt(&self, other: &Rc<T>) -> bool {
1701 **self < **other
1702 }
1a4d82fc 1703
9e0c209e 1704 /// 'Less than or equal to' comparison for two `Rc`s.
1a4d82fc
JJ
1705 ///
1706 /// The two are compared by calling `<=` on their inner values.
1707 ///
1708 /// # Examples
1709 ///
1710 /// ```
1711 /// use std::rc::Rc;
1712 ///
85aaf69f 1713 /// let five = Rc::new(5);
1a4d82fc 1714 ///
9e0c209e 1715 /// assert!(five <= Rc::new(5));
1a4d82fc
JJ
1716 /// ```
1717 #[inline(always)]
b039eaaf
SL
1718 fn le(&self, other: &Rc<T>) -> bool {
1719 **self <= **other
1720 }
1a4d82fc 1721
9e0c209e 1722 /// Greater-than comparison for two `Rc`s.
1a4d82fc
JJ
1723 ///
1724 /// The two are compared by calling `>` on their inner values.
1725 ///
1726 /// # Examples
1727 ///
1728 /// ```
1729 /// use std::rc::Rc;
1730 ///
85aaf69f 1731 /// let five = Rc::new(5);
1a4d82fc 1732 ///
9e0c209e 1733 /// assert!(five > Rc::new(4));
1a4d82fc
JJ
1734 /// ```
1735 #[inline(always)]
b039eaaf
SL
1736 fn gt(&self, other: &Rc<T>) -> bool {
1737 **self > **other
1738 }
1a4d82fc 1739
9e0c209e 1740 /// 'Greater than or equal to' comparison for two `Rc`s.
1a4d82fc
JJ
1741 ///
1742 /// The two are compared by calling `>=` on their inner values.
1743 ///
1744 /// # Examples
1745 ///
1746 /// ```
1747 /// use std::rc::Rc;
1748 ///
85aaf69f 1749 /// let five = Rc::new(5);
1a4d82fc 1750 ///
9e0c209e 1751 /// assert!(five >= Rc::new(5));
1a4d82fc
JJ
1752 /// ```
1753 #[inline(always)]
b039eaaf
SL
1754 fn ge(&self, other: &Rc<T>) -> bool {
1755 **self >= **other
1756 }
1a4d82fc
JJ
1757}
1758
85aaf69f 1759#[stable(feature = "rust1", since = "1.0.0")]
62682a34 1760impl<T: ?Sized + Ord> Ord for Rc<T> {
9e0c209e 1761 /// Comparison for two `Rc`s.
1a4d82fc
JJ
1762 ///
1763 /// The two are compared by calling `cmp()` on their inner values.
1764 ///
1765 /// # Examples
1766 ///
1767 /// ```
1768 /// use std::rc::Rc;
9e0c209e 1769 /// use std::cmp::Ordering;
1a4d82fc 1770 ///
85aaf69f 1771 /// let five = Rc::new(5);
1a4d82fc 1772 ///
9e0c209e 1773 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
1a4d82fc
JJ
1774 /// ```
1775 #[inline]
b039eaaf
SL
1776 fn cmp(&self, other: &Rc<T>) -> Ordering {
1777 (**self).cmp(&**other)
1778 }
1a4d82fc
JJ
1779}
1780
d9579d0f 1781#[stable(feature = "rust1", since = "1.0.0")]
92a42be0 1782impl<T: ?Sized + Hash> Hash for Rc<T> {
d9579d0f
AL
1783 fn hash<H: Hasher>(&self, state: &mut H) {
1784 (**self).hash(state);
1785 }
1786}
1a4d82fc 1787
d9579d0f 1788#[stable(feature = "rust1", since = "1.0.0")]
92a42be0 1789impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
9fa01778 1790 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
d9579d0f
AL
1791 fmt::Display::fmt(&**self, f)
1792 }
1793}
1a4d82fc 1794
d9579d0f 1795#[stable(feature = "rust1", since = "1.0.0")]
92a42be0 1796impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
9fa01778 1797 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
d9579d0f
AL
1798 fmt::Debug::fmt(&**self, f)
1799 }
1800}
1a4d82fc 1801
9346a6ac 1802#[stable(feature = "rust1", since = "1.0.0")]
7453a54e 1803impl<T: ?Sized> fmt::Pointer for Rc<T> {
9fa01778 1804 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
ff7c6d11 1805 fmt::Pointer::fmt(&(&**self as *const T), f)
9346a6ac
AL
1806 }
1807}
1808
136023e0 1809#[cfg(not(no_global_oom_handling))]
92a42be0
SL
1810#[stable(feature = "from_for_ptrs", since = "1.6.0")]
1811impl<T> From<T> for Rc<T> {
94222f64 1812 /// Converts a generic type `T` into an `Rc<T>`.
17df50a5
XL
1813 ///
1814 /// The conversion allocates on the heap and moves `t`
1815 /// from the stack into it.
1816 ///
1817 /// # Example
1818 /// ```rust
1819 /// # use std::rc::Rc;
1820 /// let x = 5;
1821 /// let rc = Rc::new(5);
1822 ///
1823 /// assert_eq!(Rc::from(x), rc);
1824 /// ```
92a42be0
SL
1825 fn from(t: T) -> Self {
1826 Rc::new(t)
1827 }
1828}
1829
17df50a5 1830#[cfg(not(no_global_oom_handling))]
3b2f2976 1831#[stable(feature = "shared_from_slice", since = "1.21.0")]
532ac7d7 1832impl<T: Clone> From<&[T]> for Rc<[T]> {
6a06907d
XL
1833 /// Allocate a reference-counted slice and fill it by cloning `v`'s items.
1834 ///
1835 /// # Example
1836 ///
1837 /// ```
1838 /// # use std::rc::Rc;
1839 /// let original: &[i32] = &[1, 2, 3];
1840 /// let shared: Rc<[i32]> = Rc::from(original);
1841 /// assert_eq!(&[1, 2, 3], &shared[..]);
1842 /// ```
3b2f2976
XL
1843 #[inline]
1844 fn from(v: &[T]) -> Rc<[T]> {
1845 <Self as RcFromSlice<T>>::from_slice(v)
1846 }
1847}
1848
17df50a5 1849#[cfg(not(no_global_oom_handling))]
3b2f2976 1850#[stable(feature = "shared_from_slice", since = "1.21.0")]
532ac7d7 1851impl From<&str> for Rc<str> {
6a06907d
XL
1852 /// Allocate a reference-counted string slice and copy `v` into it.
1853 ///
1854 /// # Example
1855 ///
1856 /// ```
1857 /// # use std::rc::Rc;
1858 /// let shared: Rc<str> = Rc::from("statue");
1859 /// assert_eq!("statue", &shared[..]);
1860 /// ```
3b2f2976
XL
1861 #[inline]
1862 fn from(v: &str) -> Rc<str> {
ff7c6d11
XL
1863 let rc = Rc::<[u8]>::from(v.as_bytes());
1864 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
3b2f2976
XL
1865 }
1866}
1867
17df50a5 1868#[cfg(not(no_global_oom_handling))]
3b2f2976
XL
1869#[stable(feature = "shared_from_slice", since = "1.21.0")]
1870impl From<String> for Rc<str> {
6a06907d
XL
1871 /// Allocate a reference-counted string slice and copy `v` into it.
1872 ///
1873 /// # Example
1874 ///
1875 /// ```
1876 /// # use std::rc::Rc;
1877 /// let original: String = "statue".to_owned();
1878 /// let shared: Rc<str> = Rc::from(original);
1879 /// assert_eq!("statue", &shared[..]);
1880 /// ```
3b2f2976
XL
1881 #[inline]
1882 fn from(v: String) -> Rc<str> {
1883 Rc::from(&v[..])
1884 }
1885}
1886
17df50a5 1887#[cfg(not(no_global_oom_handling))]
3b2f2976
XL
1888#[stable(feature = "shared_from_slice", since = "1.21.0")]
1889impl<T: ?Sized> From<Box<T>> for Rc<T> {
6a06907d
XL
1890 /// Move a boxed object to a new, reference-counted allocation.
1891 ///
1892 /// # Example
1893 ///
1894 /// ```
1895 /// # use std::rc::Rc;
1896 /// let original: Box<i32> = Box::new(1);
1897 /// let shared: Rc<i32> = Rc::from(original);
1898 /// assert_eq!(1, *shared);
1899 /// ```
3b2f2976
XL
1900 #[inline]
1901 fn from(v: Box<T>) -> Rc<T> {
1902 Rc::from_box(v)
1903 }
1904}
1905
17df50a5 1906#[cfg(not(no_global_oom_handling))]
3b2f2976
XL
1907#[stable(feature = "shared_from_slice", since = "1.21.0")]
1908impl<T> From<Vec<T>> for Rc<[T]> {
6a06907d
XL
1909 /// Allocate a reference-counted slice and move `v`'s items into it.
1910 ///
1911 /// # Example
1912 ///
1913 /// ```
1914 /// # use std::rc::Rc;
1915 /// let original: Vec<i32> = vec![1, 2, 3];
1916 /// let shared: Rc<[i32]> = Rc::from(original);
1917 /// assert_eq!(&[1, 2, 3], &shared[..]);
1918 /// ```
3b2f2976
XL
1919 #[inline]
1920 fn from(mut v: Vec<T>) -> Rc<[T]> {
1921 unsafe {
1922 let rc = Rc::copy_from_slice(&v);
1923
1924 // Allow the Vec to free its memory, but not destroy its contents
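            // (The elements were already copied bitwise into the new `Rc<[T]>`
            // allocation, so the `Vec` must not drop them a second time; after
            // `set_len(0)` its `Drop` impl only frees the now-empty buffer.)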
1925 v.set_len(0);
1926
1927 rc
1928 }
1929 }
1930}
1931
f9f354fc
XL
1932#[stable(feature = "shared_from_cow", since = "1.45.0")]
1933impl<'a, B> From<Cow<'a, B>> for Rc<B>
1934where
1935 B: ToOwned + ?Sized,
1936 Rc<B>: From<&'a B> + From<B::Owned>,
1937{
17df50a5
XL
1938 /// Create a reference-counted pointer from
1939 /// a clone-on-write pointer by copying its content.
1940 ///
1941 /// # Example
1942 ///
1943 /// ```rust
1944 /// # use std::rc::Rc;
1945 /// # use std::borrow::Cow;
1946 /// let cow: Cow<str> = Cow::Borrowed("eggplant");
1947 /// let shared: Rc<str> = Rc::from(cow);
1948 /// assert_eq!("eggplant", &shared[..]);
1949 /// ```
f9f354fc
XL
1950 #[inline]
1951 fn from(cow: Cow<'a, B>) -> Rc<B> {
1952 match cow {
1953 Cow::Borrowed(s) => Rc::from(s),
1954 Cow::Owned(s) => Rc::from(s),
1955 }
1956 }
1957}
1958
74b04a01 1959#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
3dfed10e 1960impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
416331ca
XL
1961 type Error = Rc<[T]>;
1962
1963 fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
1964 if boxed_slice.len() == N {
1965 Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
1966 } else {
1967 Err(boxed_slice)
1968 }
1969 }
1970}
1971
17df50a5 1972#[cfg(not(no_global_oom_handling))]
416331ca
XL
1973#[stable(feature = "shared_from_iter", since = "1.37.0")]
1974impl<T> iter::FromIterator<T> for Rc<[T]> {
1975 /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
1976 ///
1977 /// # Performance characteristics
1978 ///
1979 /// ## The general case
1980 ///
1981 /// In the general case, collecting into `Rc<[T]>` is done by first
1982 /// collecting into a `Vec<T>`. That is, when writing the following:
1983 ///
1984 /// ```rust
1985 /// # use std::rc::Rc;
1986 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
1987 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
1988 /// ```
1989 ///
1990 /// this behaves as if we wrote:
1991 ///
1992 /// ```rust
1993 /// # use std::rc::Rc;
1994 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
1995 /// .collect::<Vec<_>>() // The first set of allocations happens here.
1996 /// .into(); // A second allocation for `Rc<[T]>` happens here.
1997 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
1998 /// ```
1999 ///
2000 /// This will allocate as many times as needed for constructing the `Vec<T>`
2001 /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2002 ///
2003 /// ## Iterators of known length
2004 ///
2005 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2006 /// a single allocation will be made for the `Rc<[T]>`. For example:
2007 ///
2008 /// ```rust
2009 /// # use std::rc::Rc;
2010 /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2011 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2012 /// ```
2013 fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
f9f354fc 2014 ToRcSlice::to_rc_slice(iter.into_iter())
416331ca
XL
2015 }
2016}
2017
2018/// Specialization trait used for collecting into `Rc<[T]>`.
17df50a5 2019#[cfg(not(no_global_oom_handling))]
f9f354fc
XL
2020trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2021 fn to_rc_slice(self) -> Rc<[T]>;
416331ca
XL
2022}
2023
17df50a5 2024#[cfg(not(no_global_oom_handling))]
f9f354fc
XL
2025impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2026 default fn to_rc_slice(self) -> Rc<[T]> {
2027 self.collect::<Vec<T>>().into()
416331ca
XL
2028 }
2029}
2030
17df50a5 2031#[cfg(not(no_global_oom_handling))]
f9f354fc
XL
2032impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2033 fn to_rc_slice(self) -> Rc<[T]> {
416331ca 2034 // This is the case for a `TrustedLen` iterator.
f9f354fc 2035 let (low, high) = self.size_hint();
416331ca
XL
2036 if let Some(high) = high {
2037 debug_assert_eq!(
dfeec247
XL
2038 low,
2039 high,
416331ca
XL
2040 "TrustedLen iterator's size hint is not exact: {:?}",
2041 (low, high)
2042 );
2043
2044 unsafe {
2045 // SAFETY: We need to ensure that the iterator has an exact length; the `TrustedLen` contract guarantees exactly that.
f9f354fc 2046 Rc::from_iter_exact(self, low)
416331ca
XL
2047 }
2048 } else {
cdc7bbd5
XL
2049 // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2050 // length exceeding `usize::MAX`.
2051 // The default implementation would collect into a vec which would panic.
2052 // Thus we panic here immediately without invoking `Vec` code.
2053 panic!("capacity overflow");
416331ca
XL
2054 }
2055 }
2056}
2057
cc61c64b 2058/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
e74abb32 2059/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
c295e0f8 2060/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
9e0c209e 2061///
cc61c64b 2062/// Since a `Weak` reference does not count towards ownership, it will not
e74abb32
XL
2063/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2064/// guarantees about the value still being present. Thus it may return [`None`]
2065/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2066/// itself (the backing store) from being deallocated.
9e0c209e 2067///
e74abb32
XL
2068/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2069/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2070/// prevent circular references between [`Rc`] pointers, since mutual owning references
3b2f2976 2071/// would never allow either [`Rc`] to be dropped. For example, a tree could
cc61c64b
XL
2072/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2073/// pointers from children back to their parents.
1a4d82fc 2074///
cc61c64b 2075/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
1a4d82fc 2076///
3dfed10e 2077/// [`upgrade`]: Weak::upgrade
e9174d1e 2078#[stable(feature = "rc_weak", since = "1.4.0")]
d9579d0f 2079pub struct Weak<T: ?Sized> {
8faf50e0
XL
2080 // This is a `NonNull` to allow optimizing the size of this type in enums,
2081 // but it is not necessarily a valid pointer.
2082 // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
2083 // to allocate space on the heap. That's not a value a real pointer
2084 // will ever have because RcBox has alignment at least 2.
f035d41b 2085 // This is only possible when `T: Sized`; unsized `T` never dangle.
2c00a5a8 2086 ptr: NonNull<RcBox<T>>,
d9579d0f 2087}
1a4d82fc 2088
7453a54e 2089#[stable(feature = "rc_weak", since = "1.4.0")]
d9579d0f 2090impl<T: ?Sized> !marker::Send for Weak<T> {}
7453a54e 2091#[stable(feature = "rc_weak", since = "1.4.0")]
d9579d0f 2092impl<T: ?Sized> !marker::Sync for Weak<T> {}
85aaf69f 2093
92a42be0
SL
2094#[unstable(feature = "coerce_unsized", issue = "27732")]
2095impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
c1a9b12d 2096
dfeec247 2097#[unstable(feature = "dispatch_from_dyn", issue = "none")]
a1dfa0c6
XL
2098impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
2099
a7813a04 2100impl<T> Weak<T> {
8faf50e0 2101 /// Constructs a new `Weak<T>`, without allocating any memory.
0731742a 2102 /// Calling [`upgrade`] on the return value always gives [`None`].
a7813a04 2103 ///
3dfed10e 2104 /// [`upgrade`]: Weak::upgrade
a7813a04
XL
2105 ///
2106 /// # Examples
2107 ///
2108 /// ```
2109 /// use std::rc::Weak;
2110 ///
2111 /// let empty: Weak<i64> = Weak::new();
9e0c209e 2112 /// assert!(empty.upgrade().is_none());
a7813a04
XL
2113 /// ```
2114 #[stable(feature = "downgraded_weak", since = "1.10.0")]
ee023bcb 2115 #[rustc_const_unstable(feature = "const_weak_new", issue = "95091", reason = "recently added")]
c295e0f8 2116 #[must_use]
ee023bcb
FG
2117 pub const fn new() -> Weak<T> {
2118 Weak { ptr: unsafe { NonNull::new_unchecked(ptr::invalid_mut::<RcBox<T>>(usize::MAX)) } }
a7813a04 2119 }
29967ef6
XL
2120}
2121
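// `Weak::new` stores the sentinel address `usize::MAX` instead of allocating; this
// helper detects that sentinel. A real `RcBox` allocation can never sit at that
// address because `RcBox` has alignment of at least 2, and `usize::MAX` is odd.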
5869c6ff 2122pub(crate) fn is_dangling<T: ?Sized>(ptr: *mut T) -> bool {
ee023bcb 2123 (ptr as *mut ()).addr() == usize::MAX
29967ef6
XL
2124}
2125
2126/// Helper type to allow accessing the reference counts without
2127/// making any assertions about the data field.
2128struct WeakInner<'a> {
2129 weak: &'a Cell<usize>,
2130 strong: &'a Cell<usize>,
2131}
dc9dc135 2132
5869c6ff 2133impl<T: ?Sized> Weak<T> {
dc9dc135
XL
2134 /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
2135 ///
ba9703b0
XL
2136 /// The pointer is valid only if there are some strong references. The pointer may be dangling,
2137 /// unaligned or even [`null`] otherwise.
dc9dc135
XL
2138 ///
2139 /// # Examples
2140 ///
2141 /// ```
416331ca 2142 /// use std::rc::Rc;
dc9dc135
XL
2143 /// use std::ptr;
2144 ///
2145 /// let strong = Rc::new("hello".to_owned());
2146 /// let weak = Rc::downgrade(&strong);
2147 /// // Both point to the same object
ba9703b0 2148 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
dc9dc135 2149 /// // The strong here keeps it alive, so we can still access the object.
ba9703b0 2150 /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
dc9dc135
XL
2151 ///
2152 /// drop(strong);
ba9703b0 2153 /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
dc9dc135 2154 /// // undefined behaviour.
ba9703b0 2155 /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
dc9dc135
XL
2156 /// ```
2157 ///
c295e0f8
XL
2158 /// [`null`]: ptr::null
2159 #[must_use]
f035d41b 2160 #[stable(feature = "rc_as_ptr", since = "1.45.0")]
ba9703b0 2161 pub fn as_ptr(&self) -> *const T {
f035d41b
XL
2162 let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr);
2163
5869c6ff
XL
2164 if is_dangling(ptr) {
2165 // If the pointer is dangling, we return the sentinel directly. This cannot be
2166 // a valid payload address, as the payload is at least as aligned as RcBox (usize).
2167 ptr as *const T
2168 } else {
a2a8927a 2169 // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
5869c6ff
XL
2170 // The payload may be dropped at this point, and we have to maintain provenance,
2171 // so use raw pointer manipulation.
2172 unsafe { ptr::addr_of_mut!((*ptr).value) }
f035d41b 2173 }
dc9dc135
XL
2174 }
2175
2176 /// Consumes the `Weak<T>` and turns it into a raw pointer.
2177 ///
3dfed10e
XL
2178 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
2179 /// one weak reference (the weak count is not modified by this operation). It can be turned
2180 /// back into the `Weak<T>` with [`from_raw`].
dc9dc135
XL
2181 ///
2182 /// The same restrictions of accessing the target of the pointer as with
ba9703b0 2183 /// [`as_ptr`] apply.
dc9dc135
XL
2184 ///
2185 /// # Examples
2186 ///
2187 /// ```
dc9dc135
XL
2188 /// use std::rc::{Rc, Weak};
2189 ///
2190 /// let strong = Rc::new("hello".to_owned());
2191 /// let weak = Rc::downgrade(&strong);
416331ca 2192 /// let raw = weak.into_raw();
dc9dc135
XL
2193 ///
2194 /// assert_eq!(1, Rc::weak_count(&strong));
2195 /// assert_eq!("hello", unsafe { &*raw });
2196 ///
2197 /// drop(unsafe { Weak::from_raw(raw) });
2198 /// assert_eq!(0, Rc::weak_count(&strong));
2199 /// ```
2200 ///
3dfed10e
XL
2201 /// [`from_raw`]: Weak::from_raw
2202 /// [`as_ptr`]: Weak::as_ptr
c295e0f8 2203 #[must_use = "`self` will be dropped if the result is not used"]
f9f354fc 2204 #[stable(feature = "weak_into_raw", since = "1.45.0")]
416331ca 2205 pub fn into_raw(self) -> *const T {
ba9703b0 2206 let result = self.as_ptr();
416331ca 2207 mem::forget(self);
dc9dc135
XL
2208 result
2209 }
2210
2211 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
2212 ///
2213 /// This can be used to safely get a strong reference (by calling [`upgrade`]
2214 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
2215 ///
3dfed10e
XL
2216 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
2217 /// as these don't own anything; the method still works on them).
dc9dc135
XL
2218 ///
2219 /// # Safety
2220 ///
3dfed10e
XL
2221 /// The pointer must have originated from [`into_raw`] and must still own its potential
2222 /// weak reference.
60c5eb7d 2223 ///
3dfed10e
XL
2224 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
2225 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
2226 /// count is not modified by this operation) and therefore it must be paired with a previous
2227 /// call to [`into_raw`].
dc9dc135
XL
2228 ///
2229 /// # Examples
2230 ///
2231 /// ```
dc9dc135
XL
2232 /// use std::rc::{Rc, Weak};
2233 ///
2234 /// let strong = Rc::new("hello".to_owned());
2235 ///
416331ca
XL
2236 /// let raw_1 = Rc::downgrade(&strong).into_raw();
2237 /// let raw_2 = Rc::downgrade(&strong).into_raw();
dc9dc135
XL
2238 ///
2239 /// assert_eq!(2, Rc::weak_count(&strong));
2240 ///
416331ca 2241 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
dc9dc135
XL
2242 /// assert_eq!(1, Rc::weak_count(&strong));
2243 ///
2244 /// drop(strong);
2245 ///
2246 /// // Decrement the last weak count.
416331ca 2247 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
dc9dc135
XL
2248 /// ```
2249 ///
3dfed10e
XL
2250 /// [`into_raw`]: Weak::into_raw
2251 /// [`upgrade`]: Weak::upgrade
2252 /// [`new`]: Weak::new
f9f354fc 2253 #[stable(feature = "weak_into_raw", since = "1.45.0")]
dc9dc135 2254 pub unsafe fn from_raw(ptr: *const T) -> Self {
29967ef6 2255 // See Weak::as_ptr for context on how the input pointer is derived.
a7813a04 2256
5869c6ff
XL
2257 let ptr = if is_dangling(ptr as *mut T) {
2258 // This is a dangling Weak.
2259 ptr as *mut RcBox<T>
2260 } else {
2261 // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
2262 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
2263 let offset = unsafe { data_offset(ptr) };
2264 // Thus, we reverse the offset to get the whole RcBox.
2265 // SAFETY: the pointer originated from a Weak, so this offset is safe.
ee023bcb 2266 unsafe { (ptr as *mut u8).offset(-offset).with_metadata_of(ptr as *mut RcBox<T>) }
29967ef6 2267 };
8faf50e0 2268
29967ef6
XL
2269 // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
2270 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) } }
2271 }
1b1a35ee 2272
e74abb32
XL
2273 /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
2274 /// dropping of the inner value if successful.
d9579d0f 2275 ///
e74abb32 2276 /// Returns [`None`] if the inner value has since been dropped.
d9579d0f 2277 ///
d9579d0f
AL
2278 /// # Examples
2279 ///
2280 /// ```
d9579d0f
AL
2281 /// use std::rc::Rc;
2282 ///
2283 /// let five = Rc::new(5);
2284 ///
e9174d1e 2285 /// let weak_five = Rc::downgrade(&five);
d9579d0f
AL
2286 ///
2287 /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
9e0c209e
SL
2288 /// assert!(strong_five.is_some());
2289 ///
2290 /// // Destroy all strong pointers.
2291 /// drop(strong_five);
2292 /// drop(five);
2293 ///
2294 /// assert!(weak_five.upgrade().is_none());
d9579d0f 2295 /// ```
c295e0f8
XL
2296 #[must_use = "this returns a new `Rc`, \
2297 without modifying the original weak pointer"]
e9174d1e 2298 #[stable(feature = "rc_weak", since = "1.4.0")]
d9579d0f 2299 pub fn upgrade(&self) -> Option<Rc<T>> {
8faf50e0 2300 let inner = self.inner()?;
3c0e092e 2301
8faf50e0 2302 if inner.strong() == 0 {
d9579d0f
AL
2303 None
2304 } else {
3c0e092e
XL
2305 unsafe {
2306 inner.inc_strong();
2307 Some(Rc::from_inner(self.ptr))
2308 }
d9579d0f
AL
2309 }
2310 }
8faf50e0 2311
e74abb32 2312 /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
9fa01778
XL
2313 ///
2314 /// If `self` was created using [`Weak::new`], this will return 0.
3c0e092e 2315 #[must_use]
60c5eb7d 2316 #[stable(feature = "weak_counts", since = "1.41.0")]
9fa01778 2317 pub fn strong_count(&self) -> usize {
dfeec247 2318 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
9fa01778
XL
2319 }
2320
e74abb32 2321 /// Gets the number of `Weak` pointers pointing to this allocation.
9fa01778 2322 ///
60c5eb7d 2323 /// If no strong pointers remain, this will return zero.
3c0e092e 2324 #[must_use]
60c5eb7d
XL
2325 #[stable(feature = "weak_counts", since = "1.41.0")]
2326 pub fn weak_count(&self) -> usize {
dfeec247
XL
2327 self.inner()
2328 .map(|inner| {
2329 if inner.strong() > 0 {
2330 inner.weak() - 1 // subtract the implicit weak ptr
2331 } else {
2332 0
2333 }
2334 })
2335 .unwrap_or(0)
9fa01778
XL
2336 }
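    // A short illustration (not part of the upstream source) of how the two counters
    // reported above evolve:
    //
    // ```
    // use std::rc::Rc;
    //
    // let five = Rc::new(5);
    // let weak_five = Rc::downgrade(&five);
    // assert_eq!(weak_five.strong_count(), 1);
    // assert_eq!(weak_five.weak_count(), 1);
    //
    // drop(five);
    // assert_eq!(weak_five.strong_count(), 0);
    // assert_eq!(weak_five.weak_count(), 0); // no strong pointers remain
    // ```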
2337
1b1a35ee 2338 /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
9fa01778 2339 /// (i.e., when this `Weak` was created by `Weak::new`).
8faf50e0 2340 #[inline]
1b1a35ee 2341 fn inner(&self) -> Option<WeakInner<'_>> {
5869c6ff 2342 if is_dangling(self.ptr.as_ptr()) {
1b1a35ee
XL
2343 None
2344 } else {
2345 // We are careful to *not* create a reference covering the "data" field, as
2346 // the field may be mutated concurrently (for example, if the last `Rc`
2347 // is dropped, the data field will be dropped in-place).
2348 Some(unsafe {
2349 let ptr = self.ptr.as_ptr();
2350 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
2351 })
2352 }
8faf50e0 2353 }
0731742a 2354
e74abb32
XL
2355 /// Returns `true` if the two `Weak`s point to the same allocation (similar to
2356 /// [`ptr::eq`]), or if both don't point to any allocation
e1599b0c 2357 /// (because they were created with `Weak::new()`).
0731742a
XL
2358 ///
2359 /// # Notes
2360 ///
2361 /// Since this compares pointers, two `Weak`s created by `Weak::new()` will compare
e74abb32 2362 /// equal to each other, even though they don't point to any allocation.
0731742a
XL
2363 ///
2364 /// # Examples
2365 ///
2366 /// ```
dc9dc135 2367 /// use std::rc::Rc;
0731742a
XL
2368 ///
2369 /// let first_rc = Rc::new(5);
2370 /// let first = Rc::downgrade(&first_rc);
2371 /// let second = Rc::downgrade(&first_rc);
2372 ///
dc9dc135 2373 /// assert!(first.ptr_eq(&second));
0731742a
XL
2374 ///
2375 /// let third_rc = Rc::new(5);
2376 /// let third = Rc::downgrade(&third_rc);
2377 ///
dc9dc135 2378 /// assert!(!first.ptr_eq(&third));
0731742a
XL
2379 /// ```
2380 ///
2381 /// Comparing `Weak::new`.
2382 ///
2383 /// ```
0731742a
XL
2384 /// use std::rc::{Rc, Weak};
2385 ///
2386 /// let first = Weak::new();
2387 /// let second = Weak::new();
dc9dc135 2388 /// assert!(first.ptr_eq(&second));
0731742a
XL
2389 ///
2390 /// let third_rc = Rc::new(());
2391 /// let third = Rc::downgrade(&third_rc);
dc9dc135 2392 /// assert!(!first.ptr_eq(&third));
0731742a
XL
2393 /// ```
2394 #[inline]
3c0e092e 2395 #[must_use]
e1599b0c 2396 #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
dc9dc135
XL
2397 pub fn ptr_eq(&self, other: &Self) -> bool {
2398 self.ptr.as_ptr() == other.ptr.as_ptr()
0731742a 2399 }
d9579d0f
AL
2400}
2401
7453a54e 2402#[stable(feature = "rc_weak", since = "1.4.0")]
17df50a5 2403unsafe impl<#[may_dangle] T: ?Sized> Drop for Weak<T> {
9e0c209e 2404 /// Drops the `Weak` pointer.
d9579d0f 2405 ///
d9579d0f
AL
2406 /// # Examples
2407 ///
2408 /// ```
7cac9316 2409 /// use std::rc::{Rc, Weak};
d9579d0f 2410 ///
9e0c209e 2411 /// struct Foo;
d9579d0f 2412 ///
9e0c209e
SL
2413 /// impl Drop for Foo {
2414 /// fn drop(&mut self) {
2415 /// println!("dropped!");
2416 /// }
d9579d0f 2417 /// }
d9579d0f 2418 ///
9e0c209e
SL
2419 /// let foo = Rc::new(Foo);
2420 /// let weak_foo = Rc::downgrade(&foo);
7cac9316 2421 /// let other_weak_foo = Weak::clone(&weak_foo);
9e0c209e
SL
2422 ///
2423 /// drop(weak_foo); // Doesn't print anything
2424 /// drop(foo); // Prints "dropped!"
d9579d0f 2425 ///
9e0c209e 2426 /// assert!(other_weak_foo.upgrade().is_none());
d9579d0f
AL
2427 /// ```
2428 fn drop(&mut self) {
1b1a35ee
XL
2429 let inner = if let Some(inner) = self.inner() { inner } else { return };
2430
2431 inner.dec_weak();
2432 // the weak count starts at 1, and will only go to zero if all
2433 // the strong pointers have disappeared.
2434 if inner.weak() == 0 {
2435 unsafe {
5869c6ff 2436 Global.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
d9579d0f
AL
2437 }
2438 }
2439 }
2440}
2441
e9174d1e 2442#[stable(feature = "rc_weak", since = "1.4.0")]
d9579d0f 2443impl<T: ?Sized> Clone for Weak<T> {
e74abb32 2444 /// Makes a clone of the `Weak` pointer that points to the same allocation.
d9579d0f
AL
2445 ///
2446 /// # Examples
2447 ///
2448 /// ```
7cac9316 2449 /// use std::rc::{Rc, Weak};
d9579d0f 2450 ///
e9174d1e 2451 /// let weak_five = Rc::downgrade(&Rc::new(5));
d9579d0f 2452 ///
0bf4aa26 2453 /// let _ = Weak::clone(&weak_five);
d9579d0f
AL
2454 /// ```
2455 #[inline]
2456 fn clone(&self) -> Weak<T> {
8faf50e0
XL
2457 if let Some(inner) = self.inner() {
2458 inner.inc_weak()
2459 }
54a0048b 2460 Weak { ptr: self.ptr }
d9579d0f
AL
2461 }
2462}
1a4d82fc 2463
7453a54e 2464#[stable(feature = "rc_weak", since = "1.4.0")]
92a42be0 2465impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
9fa01778 2466 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
d9579d0f
AL
2467 write!(f, "(Weak)")
2468 }
2469}
1a4d82fc 2470
a7813a04
XL
2471#[stable(feature = "downgraded_weak", since = "1.10.0")]
2472impl<T> Default for Weak<T> {
cdc7bbd5
XL
2473 /// Constructs a new `Weak<T>`, without allocating any memory.
2474 /// Calling [`upgrade`] on the return value always gives [`None`].
9e0c209e 2475 ///
3dfed10e 2476 /// [`upgrade`]: Weak::upgrade
9e0c209e
SL
2477 ///
2478 /// # Examples
2479 ///
2480 /// ```
2481 /// use std::rc::Weak;
2482 ///
2483 /// let empty: Weak<i64> = Default::default();
2484 /// assert!(empty.upgrade().is_none());
2485 /// ```
a7813a04
XL
2486 fn default() -> Weak<T> {
2487 Weak::new()
9cc50fc6
SL
2488 }
2489}
2490
8faf50e0 2491// NOTE: We abort on reference-count overflow to deal with mem::forget safely. In particular
c1a9b12d
SL
2492// if you mem::forget Rcs (or Weaks), the ref-count can overflow, and then
2493// you can free the allocation while outstanding Rcs (or Weaks) exist.
2494// We abort because this is such a degenerate scenario that we don't care about
2495// what happens -- no real program should ever experience this.
2496//
2497// This should have negligible overhead since you don't actually need to
2498// clone these much in Rust thanks to ownership and move-semantics.
2499
d9579d0f 2500#[doc(hidden)]
1b1a35ee
XL
2501trait RcInnerPtr {
2502 fn weak_ref(&self) -> &Cell<usize>;
2503 fn strong_ref(&self) -> &Cell<usize>;
d9579d0f
AL
2504
2505 #[inline]
b039eaaf 2506 fn strong(&self) -> usize {
1b1a35ee 2507 self.strong_ref().get()
b039eaaf 2508 }
d9579d0f
AL
2509
2510 #[inline]
c1a9b12d 2511 fn inc_strong(&self) {
416331ca
XL
2512 let strong = self.strong();
2513
b7449926
XL
2514 // We want to abort on overflow instead of dropping the value.
2515 // The reference count will never be zero when this is called;
2516 // nevertheless, we insert an abort here to hint LLVM at
2517 // an otherwise missed optimization.
f035d41b
XL
2518 if strong == 0 || strong == usize::MAX {
2519 abort();
b7449926 2520 }
1b1a35ee 2521 self.strong_ref().set(strong + 1);
c1a9b12d 2522 }
d9579d0f
AL
2523
2524 #[inline]
b039eaaf 2525 fn dec_strong(&self) {
1b1a35ee 2526 self.strong_ref().set(self.strong() - 1);
b039eaaf 2527 }
d9579d0f
AL
2528
2529 #[inline]
b039eaaf 2530 fn weak(&self) -> usize {
1b1a35ee 2531 self.weak_ref().get()
b039eaaf 2532 }
d9579d0f
AL
2533
2534 #[inline]
c1a9b12d 2535 fn inc_weak(&self) {
416331ca
XL
2536 let weak = self.weak();
2537
b7449926
XL
2538 // We want to abort on overflow instead of dropping the value.
2539 // The reference count will never be zero when this is called;
2540 // nevertheless, we insert an abort here to hint LLVM at
2541 // an otherwise missed optimization.
f035d41b
XL
2542 if weak == 0 || weak == usize::MAX {
2543 abort();
b7449926 2544 }
1b1a35ee 2545 self.weak_ref().set(weak + 1);
c1a9b12d 2546 }
d9579d0f
AL
2547
2548 #[inline]
b039eaaf 2549 fn dec_weak(&self) {
1b1a35ee 2550 self.weak_ref().set(self.weak() - 1);
b039eaaf 2551 }
d9579d0f 2552}
1a4d82fc 2553
1b1a35ee 2554impl<T: ?Sized> RcInnerPtr for RcBox<T> {
d9579d0f 2555 #[inline(always)]
1b1a35ee
XL
2556 fn weak_ref(&self) -> &Cell<usize> {
2557 &self.weak
2558 }
2559
2560 #[inline(always)]
2561 fn strong_ref(&self) -> &Cell<usize> {
2562 &self.strong
85aaf69f 2563 }
1a4d82fc
JJ
2564}
2565
1b1a35ee 2566impl<'a> RcInnerPtr for WeakInner<'a> {
d9579d0f 2567 #[inline(always)]
1b1a35ee
XL
2568 fn weak_ref(&self) -> &Cell<usize> {
2569 self.weak
2570 }
2571
2572 #[inline(always)]
2573 fn strong_ref(&self) -> &Cell<usize> {
2574 self.strong
85aaf69f 2575 }
1a4d82fc
JJ
2576}
2577
92a42be0 2578#[stable(feature = "rust1", since = "1.0.0")]
e9174d1e 2579impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
b039eaaf
SL
2580 fn borrow(&self) -> &T {
2581 &**self
2582 }
2583}
2584
2585#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
2586impl<T: ?Sized> AsRef<T> for Rc<T> {
2587 fn as_ref(&self) -> &T {
2588 &**self
2589 }
e9174d1e 2590}
b7449926 2591
0731742a 2592#[stable(feature = "pin", since = "1.33.0")]
dfeec247 2593impl<T: ?Sized> Unpin for Rc<T> {}
dc9dc135 2594
5869c6ff 2595/// Get the offset within an `RcBox` for the payload behind a pointer.
f035d41b
XL
2596///
2597/// # Safety
2598///
5869c6ff
XL
2599/// The pointer must point to (and have valid metadata for) a previously
2600/// valid instance of T, but the T is allowed to be dropped.
dc9dc135 2601unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
5869c6ff
XL
2602 // Align the unsized value to the end of the RcBox.
2603 // Because RcBox is repr(C), it will always be the last field in memory.
2604 // SAFETY: since the only unsized types possible are slices, trait objects,
2605 // and extern types, the input safety requirement is currently enough to
2606 // satisfy the requirements of align_of_val_raw; this is an implementation
94222f64 2607 // detail of the language that must not be relied upon outside of std.
f035d41b 2608 unsafe { data_offset_align(align_of_val_raw(ptr)) }
416331ca
XL
2609}
2610
2611#[inline]
2612fn data_offset_align(align: usize) -> isize {
dc9dc135
XL
2613 let layout = Layout::new::<RcBox<()>>();
2614 (layout.size() + layout.padding_needed_for(align)) as isize
2615}