//! A priority queue implemented with a binary heap.
//!
//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
//! Checking the largest element is *O*(1). Converting a vector to a binary heap
//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* * log(*n*))
//! in-place heapsort.
//!
//! # Examples
//!
//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
//! It shows how to use [`BinaryHeap`] with custom types.
//!
//! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
//! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem
//! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph
//!
//! ```
//! use std::cmp::Ordering;
//! use std::collections::BinaryHeap;
//!
//! #[derive(Copy, Clone, Eq, PartialEq)]
//! struct State {
//!     cost: usize,
//!     position: usize,
//! }
//!
//! // The priority queue depends on `Ord`.
//! // Explicitly implement the trait so the queue becomes a min-heap
//! // instead of a max-heap.
//! impl Ord for State {
//!     fn cmp(&self, other: &Self) -> Ordering {
//!         // Notice that we flip the ordering on costs.
//!         // In case of a tie we compare positions - this step is necessary
//!         // to make implementations of `PartialEq` and `Ord` consistent.
//!         other.cost.cmp(&self.cost)
//!             .then_with(|| self.position.cmp(&other.position))
//!     }
//! }
//!
//! // `PartialOrd` needs to be implemented as well.
//! impl PartialOrd for State {
//!     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
//!         Some(self.cmp(other))
//!     }
//! }
//!
//! // Each node is represented as a `usize`, for a shorter implementation.
//! struct Edge {
//!     node: usize,
//!     cost: usize,
//! }
//!
//! // Dijkstra's shortest path algorithm.
//!
//! // Start at `start` and use `dist` to track the current shortest distance
//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
//! // for a simpler implementation.
//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
//!     // dist[node] = current shortest distance from `start` to `node`
//!     let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
//!
//!     let mut heap = BinaryHeap::new();
//!
//!     // We're at `start`, with a zero cost
//!     dist[start] = 0;
//!     heap.push(State { cost: 0, position: start });
//!
//!     // Examine the frontier with lower cost nodes first (min-heap)
//!     while let Some(State { cost, position }) = heap.pop() {
//!         // Alternatively we could have continued to find all shortest paths
//!         if position == goal { return Some(cost); }
//!
//!         // Important as we may have already found a better way
//!         if cost > dist[position] { continue; }
//!
//!         // For each node we can reach, see if we can find a way with
//!         // a lower cost going through this node
//!         for edge in &adj_list[position] {
//!             let next = State { cost: cost + edge.cost, position: edge.node };
//!
//!             // If so, add it to the frontier and continue
//!             if next.cost < dist[next.position] {
//!                 heap.push(next);
//!                 // Relaxation, we have now found a better way
//!                 dist[next.position] = next.cost;
//!             }
//!         }
//!     }
//!
//!     // Goal not reachable
//!     None
//! }
//!
//! fn main() {
//!     // This is the directed graph we're going to use.
//!     // The node numbers correspond to the different states,
//!     // and the edge weights symbolize the cost of moving
//!     // from one node to another.
//!     // Note that the edges are one-way.
//!     //
//!     //                  7
//!     //          +-----------------+
//!     //          |                 |
//!     //          v   1        2   |  2
//!     //          0 -----> 1 -----> 3 ---> 4
//!     //          |        ^        ^      ^
//!     //          |        | 1      |      |
//!     //          |        |        | 3    | 1
//!     //          +------> 2 -------+      |
//!     //           10      |               |
//!     //                   +---------------+
//!     //
//!     // The graph is represented as an adjacency list where each index,
//!     // corresponding to a node value, has a list of outgoing edges.
//!     // Chosen for its efficiency.
//!     let graph = vec![
//!         // Node 0
//!         vec![Edge { node: 2, cost: 10 },
//!              Edge { node: 1, cost: 1 }],
//!         // Node 1
//!         vec![Edge { node: 3, cost: 2 }],
//!         // Node 2
//!         vec![Edge { node: 1, cost: 1 },
//!              Edge { node: 3, cost: 3 },
//!              Edge { node: 4, cost: 1 }],
//!         // Node 3
//!         vec![Edge { node: 0, cost: 7 },
//!              Edge { node: 4, cost: 2 }],
//!         // Node 4
//!         vec![]];
//!
//!     assert_eq!(shortest_path(&graph, 0, 1), Some(1));
//!     assert_eq!(shortest_path(&graph, 0, 3), Some(3));
//!     assert_eq!(shortest_path(&graph, 3, 0), Some(7));
//!     assert_eq!(shortest_path(&graph, 0, 4), Some(5));
//!     assert_eq!(shortest_path(&graph, 4, 0), None);
//! }
//! ```

#![allow(missing_docs)]
#![stable(feature = "rust1", since = "1.0.0")]

use core::alloc::Allocator;
use core::fmt;
use core::iter::{FusedIterator, InPlaceIterable, SourceIter, TrustedLen};
use core::mem::{self, swap, ManuallyDrop};
use core::num::NonZeroUsize;
use core::ops::{Deref, DerefMut};
use core::ptr;

use crate::alloc::Global;
use crate::collections::TryReserveError;
use crate::slice;
use crate::vec::{self, AsVecIntoIter, Vec};

#[cfg(test)]
mod tests;

/// A priority queue implemented with a binary heap.
///
/// This will be a max-heap.
///
/// It is a logic error for an item to be modified in such a way that the
/// item's ordering relative to any other item, as determined by the [`Ord`]
/// trait, changes while it is in the heap. This is normally only possible
/// through interior mutability, global state, I/O, or unsafe code. The
/// behavior resulting from such a logic error is not specified, but will
/// be encapsulated to the `BinaryHeap` that observed the logic error and not
/// result in undefined behavior. This could include panics, incorrect results,
/// aborts, memory leaks, and non-termination.
///
/// As long as no elements change their relative order while being in the heap
/// as described above, the API of `BinaryHeap` guarantees that the heap
/// invariant remains intact, i.e. its methods all behave as documented. For
/// example, if a method is documented as iterating in sorted order, that's
/// guaranteed to work as long as elements in the heap have not changed order,
/// even in the presence of closures getting unwound out of, iterators getting
/// leaked, and similar foolishness.
///
/// # Examples
///
/// ```
/// use std::collections::BinaryHeap;
///
/// // Type inference lets us omit an explicit type signature (which
/// // would be `BinaryHeap<i32>` in this example).
/// let mut heap = BinaryHeap::new();
///
/// // We can use peek to look at the next item in the heap. In this case,
/// // there are no items in there yet, so we get None.
/// assert_eq!(heap.peek(), None);
///
/// // Let's add some scores...
/// heap.push(1);
/// heap.push(5);
/// heap.push(2);
///
/// // Now peek shows the most important item in the heap.
/// assert_eq!(heap.peek(), Some(&5));
///
/// // We can check the length of a heap.
/// assert_eq!(heap.len(), 3);
///
/// // We can iterate over the items in the heap, although they are returned in
/// // a random order.
/// for x in &heap {
///     println!("{x}");
/// }
///
/// // If we instead pop these scores, they should come back in order.
/// assert_eq!(heap.pop(), Some(5));
/// assert_eq!(heap.pop(), Some(2));
/// assert_eq!(heap.pop(), Some(1));
/// assert_eq!(heap.pop(), None);
///
/// // We can clear the heap of any remaining items.
/// heap.clear();
///
/// // The heap should now be empty.
/// assert!(heap.is_empty())
/// ```
///
/// A `BinaryHeap` with a known list of items can be initialized from an array:
///
/// ```
/// use std::collections::BinaryHeap;
///
/// let heap = BinaryHeap::from([1, 5, 2]);
/// ```
///
/// ## Min-heap
///
/// Either [`core::cmp::Reverse`] or a custom [`Ord`] implementation can be used to
/// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
/// value instead of the greatest one.
///
/// ```
/// use std::collections::BinaryHeap;
/// use std::cmp::Reverse;
///
/// let mut heap = BinaryHeap::new();
///
/// // Wrap values in `Reverse`
/// heap.push(Reverse(1));
/// heap.push(Reverse(5));
/// heap.push(Reverse(2));
///
/// // If we pop these scores now, they should come back in the reverse order.
/// assert_eq!(heap.pop(), Some(Reverse(1)));
/// assert_eq!(heap.pop(), Some(Reverse(2)));
/// assert_eq!(heap.pop(), Some(Reverse(5)));
/// assert_eq!(heap.pop(), None);
/// ```
///
/// # Time complexity
///
/// | [push]  | [pop]         | [peek]/[peek\_mut] |
/// |---------|---------------|--------------------|
/// | *O*(1)~ | *O*(log(*n*)) | *O*(1)             |
///
/// The value for `push` is an expected cost; the method documentation gives a
/// more detailed analysis.
///
/// [`core::cmp::Reverse`]: core::cmp::Reverse
/// [`Cell`]: core::cell::Cell
/// [`RefCell`]: core::cell::RefCell
/// [push]: BinaryHeap::push
/// [pop]: BinaryHeap::pop
/// [peek]: BinaryHeap::peek
/// [peek\_mut]: BinaryHeap::peek_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "BinaryHeap")]
pub struct BinaryHeap<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    data: Vec<T, A>,
}

/// Structure wrapping a mutable reference to the greatest item on a
/// `BinaryHeap`.
///
/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
/// its documentation for more.
///
/// [`peek_mut`]: BinaryHeap::peek_mut
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
pub struct PeekMut<
    'a,
    T: 'a + Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    heap: &'a mut BinaryHeap<T, A>,
    // If a set_len + sift_down are required, this is Some. If a &mut T has not
    // yet been exposed to peek_mut()'s caller, it's None.
    original_len: Option<NonZeroUsize>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: Ord + fmt::Debug, A: Allocator> fmt::Debug for PeekMut<'_, T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
    }
}

#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> Drop for PeekMut<'_, T, A> {
    fn drop(&mut self) {
        if let Some(original_len) = self.original_len {
            // SAFETY: That's how many elements were in the Vec at the time of
            // the PeekMut::deref_mut call, and therefore also at the time of
            // the BinaryHeap::peek_mut call. Since the PeekMut did not end up
            // getting leaked, we are now undoing the leak amplification that
            // the DerefMut prepared for.
            unsafe { self.heap.data.set_len(original_len.get()) };

            // SAFETY: PeekMut is only instantiated for non-empty heaps.
            unsafe { self.heap.sift_down(0) };
        }
    }
}

#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> Deref for PeekMut<'_, T, A> {
    type Target = T;
    fn deref(&self) -> &T {
        debug_assert!(!self.heap.is_empty());
        // SAFE: PeekMut is only instantiated for non-empty heaps
        unsafe { self.heap.data.get_unchecked(0) }
    }
}

#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
impl<T: Ord, A: Allocator> DerefMut for PeekMut<'_, T, A> {
    fn deref_mut(&mut self) -> &mut T {
        debug_assert!(!self.heap.is_empty());

        let len = self.heap.len();
        if len > 1 {
            // Here we preemptively leak all the rest of the underlying vector
            // after the currently max element. If the caller mutates the &mut T
            // we're about to give them, and then leaks the PeekMut, all these
            // elements will remain leaked. If they don't leak the PeekMut, then
            // either Drop or PeekMut::pop will un-leak the vector elements.
            //
            // This technique is described throughout several other places in
            // the standard library as "leak amplification".
            unsafe {
                // SAFETY: len > 1 so len != 0.
                self.original_len = Some(NonZeroUsize::new_unchecked(len));
                // SAFETY: len > 1 so all this does for now is leak elements,
                // which is safe.
                self.heap.data.set_len(1);
            }
        }

        // SAFE: PeekMut is only instantiated for non-empty heaps
        unsafe { self.heap.data.get_unchecked_mut(0) }
    }
}

impl<'a, T: Ord, A: Allocator> PeekMut<'a, T, A> {
    /// Removes the peeked value from the heap and returns it.
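    ///
    /// # Examples
    ///
    /// A usage sketch, mirroring the other examples in this module:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::binary_heap::PeekMut;
    ///
    /// let mut heap = BinaryHeap::from([1, 5, 2]);
    ///
    /// // Pop the greatest element through the `PeekMut` handle.
    /// let top = heap.peek_mut().map(PeekMut::pop);
    /// assert_eq!(top, Some(5));
    /// assert_eq!(heap.len(), 2);
    /// ```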
    #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
    pub fn pop(mut this: PeekMut<'a, T, A>) -> T {
        if let Some(original_len) = this.original_len.take() {
            // SAFETY: This is how many elements were in the Vec at the time of
            // the BinaryHeap::peek_mut call.
            unsafe { this.heap.data.set_len(original_len.get()) };

            // Unlike in Drop, here we don't also need to do a sift_down even if
            // the caller could've mutated the element. It is removed from the
            // heap on the next line and pop() is not sensitive to its value.
        }
        this.heap.pop().unwrap()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for BinaryHeap<T, A> {
    fn clone(&self) -> Self {
        BinaryHeap { data: self.data.clone() }
    }

    fn clone_from(&mut self, source: &Self) {
        self.data.clone_from(&source.data);
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Default for BinaryHeap<T> {
    /// Creates an empty `BinaryHeap<T>`.
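    ///
    /// A minimal sketch; `default()` simply delegates to [`BinaryHeap::new`]:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let heap: BinaryHeap<i32> = Default::default();
    /// assert!(heap.is_empty());
    /// ```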
    #[inline]
    fn default() -> BinaryHeap<T> {
        BinaryHeap::new()
    }
}

#[stable(feature = "binaryheap_debug", since = "1.4.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for BinaryHeap<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

struct RebuildOnDrop<
    'a,
    T: Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    heap: &'a mut BinaryHeap<T, A>,
    rebuild_from: usize,
}

impl<T: Ord, A: Allocator> Drop for RebuildOnDrop<'_, T, A> {
    fn drop(&mut self) {
        self.heap.rebuild_tail(self.rebuild_from);
    }
}

impl<T: Ord> BinaryHeap<T> {
    /// Creates an empty `BinaryHeap` as a max-heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[rustc_const_unstable(feature = "const_binary_heap_constructor", issue = "112353")]
    #[must_use]
    pub const fn new() -> BinaryHeap<T> {
        BinaryHeap { data: vec![] }
    }

    /// Creates an empty `BinaryHeap` with at least the specified capacity.
    ///
    /// The binary heap will be able to hold at least `capacity` elements without
    /// reallocating. This method is allowed to allocate for more elements than
    /// `capacity`. If `capacity` is 0, the binary heap will not allocate.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity(10);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[must_use]
    pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
        BinaryHeap { data: Vec::with_capacity(capacity) }
    }
}

impl<T: Ord, A: Allocator> BinaryHeap<T, A> {
    /// Creates an empty `BinaryHeap` as a max-heap, using `A` as allocator.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new_in(System);
    /// heap.push(4);
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[rustc_const_unstable(feature = "const_binary_heap_constructor", issue = "112353")]
    #[must_use]
    pub const fn new_in(alloc: A) -> BinaryHeap<T, A> {
        BinaryHeap { data: Vec::new_in(alloc) }
    }

    /// Creates an empty `BinaryHeap` with at least the specified capacity, using `A` as allocator.
    ///
    /// The binary heap will be able to hold at least `capacity` elements without
    /// reallocating. This method is allowed to allocate for more elements than
    /// `capacity`. If `capacity` is 0, the binary heap will not allocate.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity_in(10, System);
    /// heap.push(4);
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[must_use]
    pub fn with_capacity_in(capacity: usize, alloc: A) -> BinaryHeap<T, A> {
        BinaryHeap { data: Vec::with_capacity_in(capacity, alloc) }
    }

    /// Returns a mutable reference to the greatest item in the binary heap, or
    /// `None` if it is empty.
    ///
    /// Note: If the `PeekMut` value is leaked, some heap elements might get
    /// leaked along with it, but the remaining elements will remain a valid
    /// heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// assert!(heap.peek_mut().is_none());
    ///
    /// heap.push(1);
    /// heap.push(5);
    /// heap.push(2);
    /// {
    ///     let mut val = heap.peek_mut().unwrap();
    ///     *val = 0;
    /// }
    /// assert_eq!(heap.peek(), Some(&2));
    /// ```
    ///
    /// # Time complexity
    ///
    /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
    /// otherwise it's *O*(1).
    #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, A>> {
        if self.is_empty() { None } else { Some(PeekMut { heap: self, original_len: None }) }
    }

    /// Removes the greatest item from the binary heap and returns it, or `None` if it
    /// is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert_eq!(heap.pop(), Some(3));
    /// assert_eq!(heap.pop(), Some(1));
    /// assert_eq!(heap.pop(), None);
    /// ```
    ///
    /// # Time complexity
    ///
    /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn pop(&mut self) -> Option<T> {
        self.data.pop().map(|mut item| {
            if !self.is_empty() {
                swap(&mut item, &mut self.data[0]);
                // SAFETY: !self.is_empty() means that self.len() > 0
                unsafe { self.sift_down_to_bottom(0) };
            }
            item
        })
    }

    /// Pushes an item onto the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.push(3);
    /// heap.push(5);
    /// heap.push(1);
    ///
    /// assert_eq!(heap.len(), 3);
    /// assert_eq!(heap.peek(), Some(&5));
    /// ```
    ///
    /// # Time complexity
    ///
    /// The expected cost of `push`, averaged over every possible ordering of
    /// the elements being pushed, and over a sufficiently large number of
    /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
    /// elements that are *not* already in any sorted pattern.
    ///
    /// The time complexity degrades if elements are pushed in predominantly
    /// ascending order. In the worst case, elements are pushed in ascending
    /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
    /// containing *n* elements.
    ///
    /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
    /// occurs when capacity is exhausted and needs a resize. The resize cost
    /// has been amortized in the previous figures.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn push(&mut self, item: T) {
        let old_len = self.len();
        self.data.push(item);
        // SAFETY: Since we pushed a new item it means that
        // old_len = self.len() - 1 < self.len()
        unsafe { self.sift_up(0, old_len) };
    }

    /// Consumes the `BinaryHeap` and returns a vector in sorted
    /// (ascending) order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([1, 2, 4, 5, 7]);
    /// heap.push(6);
    /// heap.push(3);
    ///
    /// let vec = heap.into_sorted_vec();
    /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
    /// ```
    #[must_use = "`self` will be dropped if the result is not used"]
    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
    pub fn into_sorted_vec(mut self) -> Vec<T, A> {
        let mut end = self.len();
        while end > 1 {
            end -= 1;
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included),
            // so it's always a valid index to access.
            // It is safe to access index 0 (i.e. `ptr`), because
            // 1 <= end < self.len(), which means self.len() >= 2.
            unsafe {
                let ptr = self.data.as_mut_ptr();
                ptr::swap(ptr, ptr.add(end));
            }
            // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so:
            // 0 < 1 <= end <= self.len() - 1 < self.len()
            // Which means 0 < end and end < self.len().
            unsafe { self.sift_down_range(0, end) };
        }
        self.into_vec()
    }

    // The implementations of sift_up and sift_down use unsafe blocks in
    // order to move an element out of the vector (leaving behind a
    // hole), shift along the others and move the removed element back into the
    // vector at the final location of the hole.
    // The `Hole` type is used to represent this, and make sure
    // the hole is filled back at the end of its scope, even on panic.
    // Using a hole reduces the constant factor compared to using swaps,
    // which involves twice as many moves.
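    //
    // Layout reminder: the heap is an implicit binary tree stored in
    // `self.data`. The children of the node at index `i` are at indices
    // `2 * i + 1` and `2 * i + 2`, and the parent of the node at index
    // `j > 0` is at index `(j - 1) / 2`.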

    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
        // Take out the value at `pos` and create a hole.
        // SAFETY: The caller guarantees that pos < self.len()
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };

        while hole.pos() > start {
            let parent = (hole.pos() - 1) / 2;

            // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0
            // and so hole.pos() - 1 can't underflow.
            // This guarantees that parent < hole.pos() so
            // it's a valid index and also != hole.pos().
            if hole.element() <= unsafe { hole.get(parent) } {
                break;
            }

            // SAFETY: Same as above
            unsafe { hole.move_to(parent) };
        }

        hole.pos()
    }

    /// Take an element at `pos` and move it down the heap,
    /// while its children are larger.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < end <= self.len()`.
    unsafe fn sift_down_range(&mut self, pos: usize, end: usize) {
        // SAFETY: The caller guarantees that pos < end <= self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Loop invariant: child == 2 * hole.pos() + 1.
        while child <= end.saturating_sub(2) {
            // compare with the greater of the two children
            // SAFETY: child < end - 1 < self.len() and
            // child + 1 < end <= self.len(), so they're valid indexes.
            // child == 2 * hole.pos() + 1 != hole.pos() and
            // child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            // if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // if we are already in order, stop.
            // SAFETY: child is now either the old child or the old child+1
            // We already proved that both are < self.len() and != hole.pos()
            if hole.element() >= unsafe { hole.get(child) } {
                return;
            }

            // SAFETY: same as above.
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        // SAFETY: && short circuit, which means that in the
        // second condition it's already true that child == end - 1 < self.len().
        if child == end - 1 && hole.element() < unsafe { hole.get(child) } {
            // SAFETY: child is already proven to be a valid index and
            // child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }
    }

    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down(&mut self, pos: usize) {
        let len = self.len();
        // SAFETY: pos < len is guaranteed by the caller and
        // obviously len = self.len() <= self.len().
        unsafe { self.sift_down_range(pos, len) };
    }

    /// Take an element at `pos` and move it all the way down the heap,
    /// then sift it up to its position.
    ///
    /// Note: This is faster when the element is known to be large / should
    /// be closer to the bottom.
    ///
    /// # Safety
    ///
    /// The caller must guarantee that `pos < self.len()`.
    unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
        let end = self.len();
        let start = pos;

        // SAFETY: The caller guarantees that pos < self.len().
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;

        // Loop invariant: child == 2 * hole.pos() + 1.
        while child <= end.saturating_sub(2) {
            // SAFETY: child < end - 1 < self.len() and
            // child + 1 < end <= self.len(), so they're valid indexes.
            // child == 2 * hole.pos() + 1 != hole.pos() and
            // child + 1 == 2 * hole.pos() + 2 != hole.pos().
            // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
            // if T is a ZST
            child += unsafe { hole.get(child) <= hole.get(child + 1) } as usize;

            // SAFETY: Same as above
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }

        if child == end - 1 {
            // SAFETY: child == end - 1 < self.len(), so it's a valid index
            // and child == 2 * hole.pos() + 1 != hole.pos().
            unsafe { hole.move_to(child) };
        }
        pos = hole.pos();
        drop(hole);

        // SAFETY: pos is the position in the hole and was already proven
        // to be a valid index.
        unsafe { self.sift_up(start, pos) };
    }

    /// Rebuild assuming data[0..start] is still a proper heap.
    fn rebuild_tail(&mut self, start: usize) {
        if start == self.len() {
            return;
        }

        let tail_len = self.len() - start;

        #[inline(always)]
        fn log2_fast(x: usize) -> usize {
            (usize::BITS - x.leading_zeros() - 1) as usize
        }
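        // `log2_fast` computes floor(log2(x)) and requires x > 0, e.g.
        // log2_fast(8) == 3 and log2_fast(9) == 3. It is only reached below
        // when start >= tail_len >= 1, so the precondition holds.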

        // `rebuild` takes O(self.len()) operations
        // and about 2 * self.len() comparisons in the worst case
        // while repeating `sift_up` takes O(tail_len * log(start)) operations
        // and about 1 * tail_len * log_2(start) comparisons in the worst case,
        // assuming start >= tail_len. For larger heaps, the crossover point
        // no longer follows this reasoning and was determined empirically.
        let better_to_rebuild = if start < tail_len {
            true
        } else if self.len() <= 2048 {
            2 * self.len() < tail_len * log2_fast(start)
        } else {
            2 * self.len() < tail_len * 11
        };

        if better_to_rebuild {
            self.rebuild();
        } else {
            for i in start..self.len() {
                // SAFETY: The index `i` is always less than self.len().
                unsafe { self.sift_up(0, i) };
            }
        }
    }

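    // Builds the heap bottom-up by sifting down every non-leaf node, i.e.
    // Floyd's heap construction. Total work is O(self.len()), which is where
    // the *O*(*n*) vector-to-heap figure in the module docs comes from.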
    fn rebuild(&mut self) {
        let mut n = self.len() / 2;
        while n > 0 {
            n -= 1;
            // SAFETY: n starts from self.len() / 2 and goes down to 0.
            // The only case when !(n < self.len()) is if
            // self.len() == 0, but it's ruled out by the loop condition.
            unsafe { self.sift_down(n) };
        }
    }

    /// Moves all the elements of `other` into `self`, leaving `other` empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut a = BinaryHeap::from([-10, 1, 2, 3, 3]);
    /// let mut b = BinaryHeap::from([-20, 5, 43]);
    ///
    /// a.append(&mut b);
    ///
    /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
    /// assert!(b.is_empty());
    /// ```
    #[stable(feature = "binary_heap_append", since = "1.11.0")]
    pub fn append(&mut self, other: &mut Self) {
        if self.len() < other.len() {
            swap(self, other);
        }

        let start = self.data.len();

        self.data.append(&mut other.data);

        self.rebuild_tail(start);
    }

    /// Clears the binary heap, returning an iterator over the removed elements
    /// in heap order. If the iterator is dropped before being fully consumed,
    /// it drops the remaining elements in heap order.
    ///
    /// The returned iterator keeps a mutable borrow on the heap to optimize
    /// its implementation.
    ///
    /// Note:
    /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
    ///   You should use the latter for most cases.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_drain_sorted)]
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([1, 2, 3, 4, 5]);
    /// assert_eq!(heap.len(), 5);
    ///
    /// drop(heap.drain_sorted()); // removes all elements in heap order
    /// assert_eq!(heap.len(), 0);
    /// ```
    #[inline]
    #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
    pub fn drain_sorted(&mut self) -> DrainSorted<'_, T, A> {
        DrainSorted { inner: self }
    }

    /// Retains only the elements specified by the predicate.
    ///
    /// In other words, remove all elements `e` for which `f(&e)` returns
    /// `false`. The elements are visited in unsorted (and unspecified) order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut heap = BinaryHeap::from([-10, -5, 1, 2, 4, 13]);
    ///
    /// heap.retain(|x| x % 2 == 0); // only keep even numbers
    ///
    /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
    /// ```
    #[stable(feature = "binary_heap_retain", since = "1.70.0")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        // `rebuild_from` will be updated to the first touched element below, and the rebuild will
        // only be done for the tail.
        let mut guard = RebuildOnDrop { rebuild_from: self.len(), heap: self };
        let mut i = 0;

        guard.heap.data.retain(|e| {
            let keep = f(e);
            if !keep && i < guard.rebuild_from {
                guard.rebuild_from = i;
            }
            i += 1;
            keep
        });
    }
}

impl<T, A: Allocator> BinaryHeap<T, A> {
    /// Returns an iterator visiting all values in the underlying vector, in
    /// arbitrary order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4]);
    ///
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.iter() {
    ///     println!("{x}");
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { iter: self.data.iter() }
    }

    /// Returns an iterator which retrieves elements in heap order.
    /// This method consumes the original heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_into_iter_sorted)]
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5]);
    ///
    /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), [5, 4]);
    /// ```
    #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
    pub fn into_iter_sorted(self) -> IntoIterSorted<T, A> {
        IntoIterSorted { inner: self }
    }

    /// Returns the greatest item in the binary heap, or `None` if it is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// assert_eq!(heap.peek(), None);
    ///
    /// heap.push(1);
    /// heap.push(5);
    /// heap.push(2);
    /// assert_eq!(heap.peek(), Some(&5));
    /// ```
    ///
    /// # Time complexity
    ///
    /// Cost is *O*(1) in the worst case.
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn peek(&self) -> Option<&T> {
        self.data.get(0)
    }

    /// Returns the number of elements the binary heap can hold without reallocating.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::with_capacity(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn capacity(&self) -> usize {
        self.data.capacity()
    }

    /// Reserves the minimum capacity for at least `additional` elements more than
    /// the current length. Unlike [`reserve`], this will not
    /// deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `reserve_exact`, capacity will be greater than or equal to
    /// `self.len() + additional`. Does nothing if the capacity is already
    /// sufficient.
    ///
    /// [`reserve`]: BinaryHeap::reserve
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows [`usize`].
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.reserve_exact(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve_exact(&mut self, additional: usize) {
        self.data.reserve_exact(additional);
    }

    /// Reserves capacity for at least `additional` elements more than the
    /// current length. The allocator may reserve more space to speculatively
    /// avoid frequent allocations. After calling `reserve`,
    /// capacity will be greater than or equal to `self.len() + additional`.
    /// Does nothing if capacity is already sufficient.
    ///
    /// # Panics
    ///
    /// Panics if the new capacity overflows [`usize`].
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    /// heap.reserve(100);
    /// assert!(heap.capacity() >= 100);
    /// heap.push(4);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn reserve(&mut self, additional: usize) {
        self.data.reserve(additional);
    }

    /// Tries to reserve the minimum capacity for at least `additional` elements
    /// more than the current length. Unlike [`try_reserve`], this will not
    /// deliberately over-allocate to speculatively avoid frequent allocations.
    /// After calling `try_reserve_exact`, capacity will be greater than or
    /// equal to `self.len() + additional` if it returns `Ok(())`.
    /// Does nothing if the capacity is already sufficient.
    ///
    /// Note that the allocator may give the collection more space than it
    /// requests. Therefore, capacity can not be relied upon to be precisely
    /// minimal. Prefer [`try_reserve`] if future insertions are expected.
    ///
    /// [`try_reserve`]: BinaryHeap::try_reserve
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::TryReserveError;
    ///
    /// fn find_max_slow(data: &[u32]) -> Result<Option<u32>, TryReserveError> {
    ///     let mut heap = BinaryHeap::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     heap.try_reserve_exact(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     heap.extend(data.iter());
    ///
    ///     Ok(heap.pop())
    /// }
    /// # find_max_slow(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[stable(feature = "try_reserve_2", since = "1.63.0")]
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve_exact(additional)
    }

    /// Tries to reserve capacity for at least `additional` elements more than the
    /// current length. The allocator may reserve more space to speculatively
    /// avoid frequent allocations. After calling `try_reserve`, capacity will be
    /// greater than or equal to `self.len() + additional` if it returns
    /// `Ok(())`. Does nothing if capacity is already sufficient. This method
    /// preserves the contents even if an error occurs.
    ///
    /// # Errors
    ///
    /// If the capacity overflows, or the allocator reports a failure, then an error
    /// is returned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// use std::collections::TryReserveError;
    ///
    /// fn find_max_slow(data: &[u32]) -> Result<Option<u32>, TryReserveError> {
    ///     let mut heap = BinaryHeap::new();
    ///
    ///     // Pre-reserve the memory, exiting if we can't
    ///     heap.try_reserve(data.len())?;
    ///
    ///     // Now we know this can't OOM in the middle of our complex work
    ///     heap.extend(data.iter());
    ///
    ///     Ok(heap.pop())
    /// }
    /// # find_max_slow(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
    /// ```
    #[stable(feature = "try_reserve_2", since = "1.63.0")]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve(additional)
    }

    /// Discards as much additional capacity as possible.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
    ///
    /// assert!(heap.capacity() >= 100);
    /// heap.shrink_to_fit();
    /// assert!(heap.capacity() == 0);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn shrink_to_fit(&mut self) {
        self.data.shrink_to_fit();
    }

    /// Discards capacity with a lower bound.
    ///
    /// The capacity will remain at least as large as both the length
    /// and the supplied value.
    ///
    /// If the current capacity is less than the lower limit, this is a no-op.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
    ///
    /// assert!(heap.capacity() >= 100);
    /// heap.shrink_to(10);
    /// assert!(heap.capacity() >= 10);
    /// ```
    #[inline]
    #[stable(feature = "shrink_to", since = "1.56.0")]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        self.data.shrink_to(min_capacity)
    }

    /// Returns a slice of all values in the underlying vector, in arbitrary
    /// order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// #![feature(binary_heap_as_slice)]
    /// use std::collections::BinaryHeap;
    /// use std::io::{self, Write};
    ///
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]);
    ///
    /// io::sink().write(heap.as_slice()).unwrap();
    /// ```
    #[must_use]
    #[unstable(feature = "binary_heap_as_slice", issue = "83659")]
    pub fn as_slice(&self) -> &[T] {
        self.data.as_slice()
    }

    /// Consumes the `BinaryHeap` and returns the underlying vector
    /// in arbitrary order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]);
    /// let vec = heap.into_vec();
    ///
    /// // Will print in some order
    /// for x in vec {
    ///     println!("{x}");
    /// }
    /// ```
    #[must_use = "`self` will be dropped if the result is not used"]
    #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
    pub fn into_vec(self) -> Vec<T, A> {
        self.into()
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        self.data.allocator()
    }

    /// Returns the length of the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 3]);
    ///
    /// assert_eq!(heap.len(), 2);
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        self.data.len()
    }

    /// Checks if the binary heap is empty.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::new();
    ///
    /// assert!(heap.is_empty());
    ///
    /// heap.push(3);
    /// heap.push(5);
    /// heap.push(1);
    ///
    /// assert!(!heap.is_empty());
    /// ```
    #[must_use]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Clears the binary heap, returning an iterator over the removed elements
    /// in arbitrary order. If the iterator is dropped before being fully
    /// consumed, it drops the remaining elements in arbitrary order.
    ///
    /// The returned iterator keeps a mutable borrow on the heap to optimize
    /// its implementation.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert!(!heap.is_empty());
    ///
    /// for x in heap.drain() {
    ///     println!("{x}");
    /// }
    ///
    /// assert!(heap.is_empty());
    /// ```
    #[inline]
    #[stable(feature = "drain", since = "1.6.0")]
    pub fn drain(&mut self) -> Drain<'_, T, A> {
        Drain { iter: self.data.drain(..) }
    }

    /// Drops all items from the binary heap.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let mut heap = BinaryHeap::from([1, 3]);
    ///
    /// assert!(!heap.is_empty());
    ///
    /// heap.clear();
    ///
    /// assert!(heap.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn clear(&mut self) {
        self.drain();
    }
}

/// Hole represents a hole in a slice, i.e., an index without a valid value
/// (because it was moved from or duplicated).
/// In drop, `Hole` will restore the slice by filling the hole
/// position with the value that was originally removed.
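// Typical use (see `sift_up` / `sift_down_range` above): create a `Hole` at
// the element being sifted, repeatedly `move_to` a parent or child index, and
// let the `Drop` impl write the element back into the hole's final position.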
struct Hole<'a, T: 'a> {
    data: &'a mut [T],
    elt: ManuallyDrop<T>,
    pos: usize,
}

impl<'a, T> Hole<'a, T> {
    /// Create a new `Hole` at index `pos`.
    ///
    /// Unsafe because pos must be within the data slice.
    #[inline]
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // SAFE: pos should be inside the slice
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        Hole { data, elt: ManuallyDrop::new(elt), pos }
    }

    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }

    /// Returns a reference to the element removed.
    #[inline]
    fn element(&self) -> &T {
        &self.elt
    }

    /// Returns a reference to the element at `index`.
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe { self.data.get_unchecked(index) }
    }

    /// Move hole to new location
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe {
            let ptr = self.data.as_mut_ptr();
            let index_ptr: *const _ = ptr.add(index);
            let hole_ptr = ptr.add(self.pos);
            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        }
        self.pos = index;
    }
}

impl<T> Drop for Hole<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // fill the hole again
        unsafe {
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
    }
}

/// An iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::iter()`]. See its
/// documentation for more.
///
/// [`iter`]: BinaryHeap::iter
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
    iter: slice::Iter<'a, T>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
    }
}

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Iter<'_, T> {
    fn clone(&self) -> Self {
        Iter { iter: self.iter.clone() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<&'a T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    #[inline]
    fn last(self) -> Option<&'a T> {
        self.iter.last()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<&'a T> {
        self.iter.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}

/// An owning iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::into_iter()`]
/// (provided by the [`IntoIterator`] trait). See its documentation for more.
///
/// [`into_iter`]: BinaryHeap::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    iter: vec::IntoIter<T, A>,
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.iter.allocator()
    }
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[stable(feature = "default_iters", since = "1.70.0")]
impl<T> Default for IntoIter<T> {
    /// Creates an empty `binary_heap::IntoIter`.
    ///
    /// ```
    /// # use std::collections::binary_heap;
    /// let iter: binary_heap::IntoIter<u8> = Default::default();
    /// assert_eq!(iter.len(), 0);
    /// ```
    fn default() -> Self {
        IntoIter { iter: Default::default() }
    }
}

// In addition to the SAFETY invariants of the following three unsafe traits,
// also refer to the vec::in_place_collect module documentation for an overview.
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = IntoIter<T, A>;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<I, A: Allocator> InPlaceIterable for IntoIter<I, A> {}

unsafe impl<I> AsVecIntoIter for IntoIter<I> {
    type Item = I;

    fn as_into_iter(&mut self) -> &mut vec::IntoIter<Self::Item> {
        &mut self.iter
    }
}

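/// An owning iterator over the elements of a `BinaryHeap`, yielding them in
/// heap order (for this max-heap, greatest first): `next` simply pops the
/// underlying heap.
///
/// This `struct` is created by [`BinaryHeap::into_iter_sorted()`]. See its
/// documentation for more.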
3c0e092e 1584#[must_use = "iterators are lazy and do nothing unless consumed"]
e74abb32
XL
1585#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1586#[derive(Clone, Debug)]
fe692bf9
FG
1587pub struct IntoIterSorted<
1588 T,
1589 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
1590> {
1591 inner: BinaryHeap<T, A>,
1592}
1593
1594impl<T, A: Allocator> IntoIterSorted<T, A> {
1595 /// Returns a reference to the underlying allocator.
1596 #[unstable(feature = "allocator_api", issue = "32838")]
1597 pub fn allocator(&self) -> &A {
1598 self.inner.allocator()
1599 }
e74abb32
XL
1600}
1601
1602#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
fe692bf9 1603impl<T: Ord, A: Allocator> Iterator for IntoIterSorted<T, A> {
e74abb32
XL
1604 type Item = T;
1605
1606 #[inline]
1607 fn next(&mut self) -> Option<T> {
1608 self.inner.pop()
1609 }
1610
1611 #[inline]
1612 fn size_hint(&self) -> (usize, Option<usize>) {
1613 let exact = self.inner.len();
1614 (exact, Some(exact))
1615 }
1616}
1617
1618#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
fe692bf9 1619impl<T: Ord, A: Allocator> ExactSizeIterator for IntoIterSorted<T, A> {}
e74abb32
XL
1620
1621#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
fe692bf9 1622impl<T: Ord, A: Allocator> FusedIterator for IntoIterSorted<T, A> {}
e74abb32
XL
1623
1624#[unstable(feature = "trusted_len", issue = "37572")]
fe692bf9 1625unsafe impl<T: Ord, A: Allocator> TrustedLen for IntoIterSorted<T, A> {}
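
// Illustrative sketch (added commentary, not upstream code): because `next`
// simply pops the max-heap, `IntoIterSorted` yields elements in descending
// order. With the unstable `binary_heap_into_iter_sorted` feature enabled:
//
//     let heap = BinaryHeap::from([1, 4, 2, 3]);
//     let sorted: Vec<i32> = heap.into_iter_sorted().collect();
//     assert_eq!(sorted, [4, 3, 2, 1]);
//
// The exact `size_hint` above is what justifies the `ExactSizeIterator` and
// `TrustedLen` implementations.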

/// A draining iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by [`BinaryHeap::drain()`]. See its
/// documentation for more.
///
/// [`drain`]: BinaryHeap::drain
#[stable(feature = "drain", since = "1.6.0")]
#[derive(Debug)]
pub struct Drain<
    'a,
    T: 'a,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    iter: vec::Drain<'a, T, A>,
}

impl<T, A: Allocator> Drain<'_, T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.iter.allocator()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
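
// Illustrative sketch (added commentary, not upstream code): `drain` empties
// the heap but keeps its allocation, yielding elements in arbitrary order:
//
//     let mut heap = BinaryHeap::from([1, 3, 2]);
//     let drained: Vec<i32> = heap.drain().collect(); // some order of 1, 2, 3
//     assert!(heap.is_empty());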

/// A draining iterator over the elements of a `BinaryHeap`, which removes
/// and yields elements in heap order (greatest first).
///
/// This `struct` is created by [`BinaryHeap::drain_sorted()`]. See its
/// documentation for more.
///
/// [`drain_sorted`]: BinaryHeap::drain_sorted
#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
#[derive(Debug)]
pub struct DrainSorted<
    'a,
    T: Ord,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    inner: &'a mut BinaryHeap<T, A>,
}

impl<'a, T: Ord, A: Allocator> DrainSorted<'a, T, A> {
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(&self) -> &A {
        self.inner.allocator()
    }
}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<'a, T: Ord, A: Allocator> Drop for DrainSorted<'a, T, A> {
    /// Removes heap elements in heap order.
    fn drop(&mut self) {
        // Guard that keeps draining the heap if dropping an element panics,
        // so the heap is left empty even on unwind. (A second panic while
        // already unwinding aborts, as nested panics always do.)
        struct DropGuard<'r, 'a, T: Ord, A: Allocator>(&'r mut DrainSorted<'a, T, A>);

        impl<'r, 'a, T: Ord, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
            fn drop(&mut self) {
                while self.0.inner.pop().is_some() {}
            }
        }

        while let Some(item) = self.inner.pop() {
            // Arm the guard before dropping `item`; if `drop(item)` unwinds,
            // the guard's destructor clears the rest of the heap.
            let guard = DropGuard(self);
            drop(item);
            // Dropping succeeded, so disarm the guard and continue normally.
            mem::forget(guard);
        }
    }
}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> Iterator for DrainSorted<'_, T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = self.inner.len();
        (exact, Some(exact))
    }
}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> ExactSizeIterator for DrainSorted<'_, T, A> {}

#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
impl<T: Ord, A: Allocator> FusedIterator for DrainSorted<'_, T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T: Ord, A: Allocator> TrustedLen for DrainSorted<'_, T, A> {}
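
// Illustrative sketch (added commentary, not upstream code): with the
// unstable `binary_heap_drain_sorted` feature enabled, draining in sorted
// order while keeping the heap's allocation looks like:
//
//     let mut heap = BinaryHeap::from([1, 4, 2, 3]);
//     let sorted: Vec<i32> = heap.drain_sorted().collect();
//     assert_eq!(sorted, [4, 3, 2, 1]);
//     assert!(heap.is_empty());
//
// Unlike `drain`, elements come out greatest-first, at the cost of one pop
// (O(log n)) per element rather than a straight memory move.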

#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T: Ord, A: Allocator> From<Vec<T, A>> for BinaryHeap<T, A> {
    /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
    ///
    /// This conversion happens in-place, and has *O*(*n*) time complexity.
    fn from(vec: Vec<T, A>) -> BinaryHeap<T, A> {
        let mut heap = BinaryHeap { data: vec };
        heap.rebuild();
        heap
    }
}
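
// Illustrative sketch (added commentary, not upstream code): `rebuild` is a
// bottom-up heapify over the existing buffer, which is why this conversion
// is O(n) rather than the O(n log n) of pushing elements one at a time:
//
//     let heap = BinaryHeap::from(vec![3, 1, 4, 1, 5]);
//     assert_eq!(heap.peek(), Some(&5)); // the vector's buffer is reused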

#[stable(feature = "std_collections_from_array", since = "1.56.0")]
impl<T: Ord, const N: usize> From<[T; N]> for BinaryHeap<T> {
    /// Converts a `[T; N]` into a `BinaryHeap<T>`.
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    ///
    /// let mut h1 = BinaryHeap::from([1, 4, 2, 3]);
    /// let mut h2: BinaryHeap<_> = [1, 4, 2, 3].into();
    /// while let Some((a, b)) = h1.pop().zip(h2.pop()) {
    ///     assert_eq!(a, b);
    /// }
    /// ```
    fn from(arr: [T; N]) -> Self {
        Self::from_iter(arr)
    }
}

#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
impl<T, A: Allocator> From<BinaryHeap<T, A>> for Vec<T, A> {
    /// Converts a `BinaryHeap<T>` into a `Vec<T>`.
    ///
    /// This conversion requires no data movement or allocation, and has
    /// constant time complexity.
    fn from(heap: BinaryHeap<T, A>) -> Vec<T, A> {
        heap.data
    }
}
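
// Illustrative sketch (added commentary, not upstream code): the resulting
// `Vec` is in the heap's internal layout, which looks arbitrary; use
// `into_sorted_vec` when ascending order is wanted:
//
//     let heap = BinaryHeap::from([1, 4, 2, 3]);
//     let unordered: Vec<i32> = Vec::from(heap); // O(1), heap layout
//
//     let heap = BinaryHeap::from([1, 4, 2, 3]);
//     assert_eq!(heap.into_sorted_vec(), [1, 2, 3, 4]); // O(n log n)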

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
    }
}
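
// Illustrative sketch (added commentary, not upstream code): collecting into
// a `Vec` first and heapifying once keeps `collect` at O(n) overall, instead
// of O(n log n) for n individual pushes:
//
//     let heap: BinaryHeap<i32> = (1..=5).collect();
//     assert_eq!(heap.peek(), Some(&5));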

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> IntoIterator for BinaryHeap<T, A> {
    type Item = T;
    type IntoIter = IntoIter<T, A>;

    /// Creates a consuming iterator, that is, one that moves each value out of
    /// the binary heap in arbitrary order. The binary heap cannot be used
    /// after calling this.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BinaryHeap;
    /// let heap = BinaryHeap::from([1, 2, 3, 4]);
    ///
    /// // Print 1, 2, 3, 4 in arbitrary order
    /// for x in heap.into_iter() {
    ///     // x has type i32, not &i32
    ///     println!("{x}");
    /// }
    /// ```
    fn into_iter(self) -> IntoIter<T, A> {
        IntoIter { iter: self.data.into_iter() }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T, A: Allocator> IntoIterator for &'a BinaryHeap<T, A> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Iter<'a, T> {
        self.iter()
    }
}
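
// Illustrative sketch (added commentary, not upstream code): this impl is
// what lets a heap be iterated by reference without consuming it:
//
//     let heap = BinaryHeap::from([1, 2, 3]);
//     for x in &heap {
//         println!("{x}"); // `x` has type `&i32`; order is arbitrary
//     }
//     assert_eq!(heap.len(), 3); // heap is still usable afterwards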

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord, A: Allocator> Extend<T> for BinaryHeap<T, A> {
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        // Append the new elements to the backing vector in bulk, then restore
        // the heap property for the appended suffix when the guard drops.
        // Running the rebuild in the guard's destructor keeps the heap in a
        // valid state even if the iterator panics.
        let guard = RebuildOnDrop { rebuild_from: self.len(), heap: self };
        guard.heap.data.extend(iter);
    }

    #[inline]
    fn extend_one(&mut self, item: T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}
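
// Illustrative sketch (added commentary, not upstream code): bulk extension
// defers heap maintenance until all items are in the buffer:
//
//     let mut heap = BinaryHeap::from([5]);
//     heap.extend([1, 9, 2]);
//     assert_eq!(heap.peek(), Some(&9));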

#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Ord + Copy, A: Allocator> Extend<&'a T> for BinaryHeap<T, A> {
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }

    #[inline]
    fn extend_one(&mut self, &item: &'a T) {
        self.push(item);
    }

    #[inline]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}