1 //! A priority queue implemented with a binary heap.
3 //! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
4 //! Checking the largest element is *O*(1). Converting a vector to a binary heap
5 //! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
6 //! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* \* log(*n*))
11 //! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
12 //! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
13 //! It shows how to use [`BinaryHeap`] with custom types.
15 //! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
16 //! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem
17 //! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph
20 //! use std::cmp::Ordering;
21 //! use std::collections::BinaryHeap;
23 //! #[derive(Copy, Clone, Eq, PartialEq)]
29 //! // The priority queue depends on `Ord`.
30 //! // Explicitly implement the trait so the queue becomes a min-heap
31 //! // instead of a max-heap.
32 //! impl Ord for State {
33 //! fn cmp(&self, other: &Self) -> Ordering {
34 //! // Notice that we flip the ordering on costs.
35 //! // In case of a tie we compare positions - this step is necessary
36 //! // to make implementations of `PartialEq` and `Ord` consistent.
37 //! other.cost.cmp(&self.cost)
38 //! .then_with(|| self.position.cmp(&other.position))
42 //! // `PartialOrd` needs to be implemented as well.
43 //! impl PartialOrd for State {
44 //! fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
45 //! Some(self.cmp(other))
49 //! // Each node is represented as a `usize`, for a shorter implementation.
55 //! // Dijkstra's shortest path algorithm.
57 //! // Start at `start` and use `dist` to track the current shortest distance
58 //! // to each node. This implementation isn't memory-efficient as it may leave duplicate
59 //! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
60 //! // for a simpler implementation.
61 //! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
62 //! // dist[node] = current shortest distance from `start` to `node`
63 //! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
65 //! let mut heap = BinaryHeap::new();
67 //! // We're at `start`, with a zero cost
69 //! heap.push(State { cost: 0, position: start });
71 //! // Examine the frontier with lower cost nodes first (min-heap)
72 //! while let Some(State { cost, position }) = heap.pop() {
73 //! // Alternatively we could have continued to find all shortest paths
74 //! if position == goal { return Some(cost); }
76 //! // Important as we may have already found a better way
77 //! if cost > dist[position] { continue; }
79 //! // For each node we can reach, see if we can find a way with
80 //! // a lower cost going through this node
81 //! for edge in &adj_list[position] {
82 //! let next = State { cost: cost + edge.cost, position: edge.node };
84 //! // If so, add it to the frontier and continue
85 //! if next.cost < dist[next.position] {
87 //! // Relaxation, we have now found a better way
88 //! dist[next.position] = next.cost;
93 //! // Goal not reachable
98 //! // This is the directed graph we're going to use.
99 //! // The node numbers correspond to the different states,
100 //! // and the edge weights symbolize the cost of moving
101 //! // from one node to another.
102 //! // Note that the edges are one-way.
105 //! // +-----------------+
108 //! // 0 -----> 1 -----> 3 ---> 4
112 //! // +------> 2 -------+ |
114 //! // +---------------+
116 //! // The graph is represented as an adjacency list where each index,
117 //! // corresponding to a node value, has a list of outgoing edges.
118 //! // Chosen for its efficiency.
119 //! let graph = vec![
121 //! vec![Edge { node: 2, cost: 10 },
122 //! Edge { node: 1, cost: 1 }],
124 //! vec![Edge { node: 3, cost: 2 }],
126 //! vec![Edge { node: 1, cost: 1 },
127 //! Edge { node: 3, cost: 3 },
128 //! Edge { node: 4, cost: 1 }],
130 //! vec![Edge { node: 0, cost: 7 },
131 //! Edge { node: 4, cost: 2 }],
135 //! assert_eq!(shortest_path(&graph, 0, 1), Some(1));
136 //! assert_eq!(shortest_path(&graph, 0, 3), Some(3));
137 //! assert_eq!(shortest_path(&graph, 3, 0), Some(7));
138 //! assert_eq!(shortest_path(&graph, 0, 4), Some(5));
139 //! assert_eq!(shortest_path(&graph, 4, 0), None);
143 #![allow(missing_docs)]
144 #![stable(feature = "rust1", since = "1.0.0")]
use core::fmt;
use core::iter::{FromIterator, FusedIterator, InPlaceIterable, SourceIter, TrustedLen};
use core::mem::{self, swap, ManuallyDrop};
use core::ops::{Deref, DerefMut};
use core::ptr;

use crate::slice;
use crate::vec::{self, AsIntoIter, Vec};

use super::SpecExtend;
157 /// A priority queue implemented with a binary heap.
159 /// This will be a max-heap.
161 /// It is a logic error for an item to be modified in such a way that the
162 /// item's ordering relative to any other item, as determined by the `Ord`
163 /// trait, changes while it is in the heap. This is normally only possible
164 /// through `Cell`, `RefCell`, global state, I/O, or unsafe code.
169 /// use std::collections::BinaryHeap;
171 /// // Type inference lets us omit an explicit type signature (which
172 /// // would be `BinaryHeap<i32>` in this example).
173 /// let mut heap = BinaryHeap::new();
175 /// // We can use peek to look at the next item in the heap. In this case,
176 /// // there's no items in there yet so we get None.
177 /// assert_eq!(heap.peek(), None);
179 /// // Let's add some scores...
184 /// // Now peek shows the most important item in the heap.
185 /// assert_eq!(heap.peek(), Some(&5));
187 /// // We can check the length of a heap.
188 /// assert_eq!(heap.len(), 3);
190 /// // We can iterate over the items in the heap, although they are returned in
191 /// // a random order.
193 /// println!("{}", x);
196 /// // If we instead pop these scores, they should come back in order.
197 /// assert_eq!(heap.pop(), Some(5));
198 /// assert_eq!(heap.pop(), Some(2));
199 /// assert_eq!(heap.pop(), Some(1));
200 /// assert_eq!(heap.pop(), None);
202 /// // We can clear the heap of any remaining items.
205 /// // The heap should now be empty.
206 /// assert!(heap.is_empty())
211 /// Either `std::cmp::Reverse` or a custom `Ord` implementation can be used to
212 /// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
213 /// value instead of the greatest one.
216 /// use std::collections::BinaryHeap;
217 /// use std::cmp::Reverse;
219 /// let mut heap = BinaryHeap::new();
221 /// // Wrap values in `Reverse`
222 /// heap.push(Reverse(1));
223 /// heap.push(Reverse(5));
224 /// heap.push(Reverse(2));
226 /// // If we pop these scores now, they should come back in the reverse order.
227 /// assert_eq!(heap.pop(), Some(Reverse(1)));
228 /// assert_eq!(heap.pop(), Some(Reverse(2)));
229 /// assert_eq!(heap.pop(), Some(Reverse(5)));
230 /// assert_eq!(heap.pop(), None);
233 /// # Time complexity
235 /// | [push] | [pop] | [peek]/[peek\_mut] |
236 /// |--------|-----------|--------------------|
237 /// | O(1)~ | *O*(log(*n*)) | *O*(1) |
239 /// The value for `push` is an expected cost; the method documentation gives a
240 /// more detailed analysis.
242 /// [push]: BinaryHeap::push
243 /// [pop]: BinaryHeap::pop
244 /// [peek]: BinaryHeap::peek
245 /// [peek\_mut]: BinaryHeap::peek_mut
246 #[stable(feature = "rust1", since = "1.0.0")]
247 pub struct BinaryHeap
<T
> {
251 /// Structure wrapping a mutable reference to the greatest item on a
254 /// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
255 /// its documentation for more.
257 /// [`peek_mut`]: BinaryHeap::peek_mut
258 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
259 pub struct PeekMut
<'a
, T
: 'a
+ Ord
> {
260 heap
: &'a
mut BinaryHeap
<T
>,
264 #[stable(feature = "collection_debug", since = "1.17.0")]
265 impl<T
: Ord
+ fmt
::Debug
> fmt
::Debug
for PeekMut
<'_
, T
> {
266 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
267 f
.debug_tuple("PeekMut").field(&self.heap
.data
[0]).finish()
271 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
272 impl<T
: Ord
> Drop
for PeekMut
<'_
, T
> {
275 self.heap
.sift_down(0);
280 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
281 impl<T
: Ord
> Deref
for PeekMut
<'_
, T
> {
283 fn deref(&self) -> &T
{
284 debug_assert
!(!self.heap
.is_empty());
285 // SAFE: PeekMut is only instantiated for non-empty heaps
286 unsafe { self.heap.data.get_unchecked(0) }
290 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
291 impl<T
: Ord
> DerefMut
for PeekMut
<'_
, T
> {
292 fn deref_mut(&mut self) -> &mut T
{
293 debug_assert
!(!self.heap
.is_empty());
295 // SAFE: PeekMut is only instantiated for non-empty heaps
296 unsafe { self.heap.data.get_unchecked_mut(0) }
300 impl<'a
, T
: Ord
> PeekMut
<'a
, T
> {
301 /// Removes the peeked value from the heap and returns it.
302 #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
303 pub fn pop(mut this
: PeekMut
<'a
, T
>) -> T
{
304 let value
= this
.heap
.pop().unwrap();
310 #[stable(feature = "rust1", since = "1.0.0")]
311 impl<T
: Clone
> Clone
for BinaryHeap
<T
> {
312 fn clone(&self) -> Self {
313 BinaryHeap { data: self.data.clone() }
316 fn clone_from(&mut self, source
: &Self) {
317 self.data
.clone_from(&source
.data
);
321 #[stable(feature = "rust1", since = "1.0.0")]
322 impl<T
: Ord
> Default
for BinaryHeap
<T
> {
323 /// Creates an empty `BinaryHeap<T>`.
325 fn default() -> BinaryHeap
<T
> {
330 #[stable(feature = "binaryheap_debug", since = "1.4.0")]
331 impl<T
: fmt
::Debug
> fmt
::Debug
for BinaryHeap
<T
> {
332 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
333 f
.debug_list().entries(self.iter()).finish()
337 impl<T
: Ord
> BinaryHeap
<T
> {
338 /// Creates an empty `BinaryHeap` as a max-heap.
345 /// use std::collections::BinaryHeap;
346 /// let mut heap = BinaryHeap::new();
349 #[stable(feature = "rust1", since = "1.0.0")]
350 pub fn new() -> BinaryHeap
<T
> {
351 BinaryHeap { data: vec![] }
354 /// Creates an empty `BinaryHeap` with a specific capacity.
355 /// This preallocates enough memory for `capacity` elements,
356 /// so that the `BinaryHeap` does not have to be reallocated
357 /// until it contains at least that many values.
364 /// use std::collections::BinaryHeap;
365 /// let mut heap = BinaryHeap::with_capacity(10);
368 #[stable(feature = "rust1", since = "1.0.0")]
369 pub fn with_capacity(capacity
: usize) -> BinaryHeap
<T
> {
370 BinaryHeap { data: Vec::with_capacity(capacity) }
373 /// Returns a mutable reference to the greatest item in the binary heap, or
374 /// `None` if it is empty.
376 /// Note: If the `PeekMut` value is leaked, the heap may be in an
377 /// inconsistent state.
384 /// use std::collections::BinaryHeap;
385 /// let mut heap = BinaryHeap::new();
386 /// assert!(heap.peek_mut().is_none());
392 /// let mut val = heap.peek_mut().unwrap();
395 /// assert_eq!(heap.peek(), Some(&2));
398 /// # Time complexity
400 /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
401 /// otherwise it's *O*(1).
402 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
403 pub fn peek_mut(&mut self) -> Option
<PeekMut
<'_
, T
>> {
404 if self.is_empty() { None }
else { Some(PeekMut { heap: self, sift: false }
) }
407 /// Removes the greatest item from the binary heap and returns it, or `None` if it
415 /// use std::collections::BinaryHeap;
416 /// let mut heap = BinaryHeap::from(vec![1, 3]);
418 /// assert_eq!(heap.pop(), Some(3));
419 /// assert_eq!(heap.pop(), Some(1));
420 /// assert_eq!(heap.pop(), None);
423 /// # Time complexity
425 /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
426 #[stable(feature = "rust1", since = "1.0.0")]
427 pub fn pop(&mut self) -> Option
<T
> {
428 self.data
.pop().map(|mut item
| {
429 if !self.is_empty() {
430 swap(&mut item
, &mut self.data
[0]);
431 self.sift_down_to_bottom(0);
437 /// Pushes an item onto the binary heap.
444 /// use std::collections::BinaryHeap;
445 /// let mut heap = BinaryHeap::new();
450 /// assert_eq!(heap.len(), 3);
451 /// assert_eq!(heap.peek(), Some(&5));
454 /// # Time complexity
456 /// The expected cost of `push`, averaged over every possible ordering of
457 /// the elements being pushed, and over a sufficiently large number of
458 /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
459 /// elements that are *not* already in any sorted pattern.
461 /// The time complexity degrades if elements are pushed in predominantly
462 /// ascending order. In the worst case, elements are pushed in ascending
463 /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
464 /// containing *n* elements.
466 /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
467 /// occurs when capacity is exhausted and needs a resize. The resize cost
468 /// has been amortized in the previous figures.
469 #[stable(feature = "rust1", since = "1.0.0")]
470 pub fn push(&mut self, item
: T
) {
471 let old_len
= self.len();
472 self.data
.push(item
);
473 self.sift_up(0, old_len
);
476 /// Consumes the `BinaryHeap` and returns a vector in sorted
477 /// (ascending) order.
484 /// use std::collections::BinaryHeap;
486 /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
490 /// let vec = heap.into_sorted_vec();
491 /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
493 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
494 pub fn into_sorted_vec(mut self) -> Vec
<T
> {
495 let mut end
= self.len();
498 self.data
.swap(0, end
);
499 self.sift_down_range(0, end
);
504 // The implementations of sift_up and sift_down use unsafe blocks in
505 // order to move an element out of the vector (leaving behind a
506 // hole), shift along the others and move the removed element back into the
507 // vector at the final location of the hole.
508 // The `Hole` type is used to represent this, and make sure
509 // the hole is filled back at the end of its scope, even on panic.
510 // Using a hole reduces the constant factor compared to using swaps,
511 // which involves twice as many moves.
512 fn sift_up(&mut self, start
: usize, pos
: usize) -> usize {
514 // Take out the value at `pos` and create a hole.
515 let mut hole
= Hole
::new(&mut self.data
, pos
);
517 while hole
.pos() > start
{
518 let parent
= (hole
.pos() - 1) / 2;
519 if hole
.element() <= hole
.get(parent
) {
522 hole
.move_to(parent
);
528 /// Take an element at `pos` and move it down the heap,
529 /// while its children are larger.
530 fn sift_down_range(&mut self, pos
: usize, end
: usize) {
532 let mut hole
= Hole
::new(&mut self.data
, pos
);
533 let mut child
= 2 * pos
+ 1;
535 let right
= child
+ 1;
536 // compare with the greater of the two children
537 if right
< end
&& hole
.get(child
) <= hole
.get(right
) {
540 // if we are already in order, stop.
541 if hole
.element() >= hole
.get(child
) {
545 child
= 2 * hole
.pos() + 1;
550 fn sift_down(&mut self, pos
: usize) {
551 let len
= self.len();
552 self.sift_down_range(pos
, len
);
555 /// Take an element at `pos` and move it all the way down the heap,
556 /// then sift it up to its position.
558 /// Note: This is faster when the element is known to be large / should
559 /// be closer to the bottom.
560 fn sift_down_to_bottom(&mut self, mut pos
: usize) {
561 let end
= self.len();
564 let mut hole
= Hole
::new(&mut self.data
, pos
);
565 let mut child
= 2 * pos
+ 1;
567 let right
= child
+ 1;
568 // compare with the greater of the two children
569 if right
< end
&& hole
.get(child
) <= hole
.get(right
) {
573 child
= 2 * hole
.pos() + 1;
577 self.sift_up(start
, pos
);
580 fn rebuild(&mut self) {
581 let mut n
= self.len() / 2;
588 /// Moves all the elements of `other` into `self`, leaving `other` empty.
595 /// use std::collections::BinaryHeap;
597 /// let v = vec![-10, 1, 2, 3, 3];
598 /// let mut a = BinaryHeap::from(v);
600 /// let v = vec![-20, 5, 43];
601 /// let mut b = BinaryHeap::from(v);
603 /// a.append(&mut b);
605 /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
606 /// assert!(b.is_empty());
608 #[stable(feature = "binary_heap_append", since = "1.11.0")]
609 pub fn append(&mut self, other
: &mut Self) {
610 if self.len() < other
.len() {
614 if other
.is_empty() {
619 fn log2_fast(x
: usize) -> usize {
620 (usize::BITS
- x
.leading_zeros() - 1) as usize
623 // `rebuild` takes O(len1 + len2) operations
624 // and about 2 * (len1 + len2) comparisons in the worst case
625 // while `extend` takes O(len2 * log(len1)) operations
626 // and about 1 * len2 * log_2(len1) comparisons in the worst case,
627 // assuming len1 >= len2.
629 fn better_to_rebuild(len1
: usize, len2
: usize) -> bool
{
630 2 * (len1
+ len2
) < len2
* log2_fast(len1
)
633 if better_to_rebuild(self.len(), other
.len()) {
634 self.data
.append(&mut other
.data
);
637 self.extend(other
.drain());
641 /// Returns an iterator which retrieves elements in heap order.
642 /// The retrieved elements are removed from the original heap.
643 /// The remaining elements will be removed on drop in heap order.
646 /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
647 /// You should use the latter for most cases.
654 /// #![feature(binary_heap_drain_sorted)]
655 /// use std::collections::BinaryHeap;
657 /// let mut heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
658 /// assert_eq!(heap.len(), 5);
660 /// drop(heap.drain_sorted()); // removes all elements in heap order
661 /// assert_eq!(heap.len(), 0);
664 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
665 pub fn drain_sorted(&mut self) -> DrainSorted
<'_
, T
> {
666 DrainSorted { inner: self }
669 /// Retains only the elements specified by the predicate.
671 /// In other words, remove all elements `e` such that `f(&e)` returns
672 /// `false`. The elements are visited in unsorted (and unspecified) order.
679 /// #![feature(binary_heap_retain)]
680 /// use std::collections::BinaryHeap;
682 /// let mut heap = BinaryHeap::from(vec![-10, -5, 1, 2, 4, 13]);
684 /// heap.retain(|x| x % 2 == 0); // only keep even numbers
686 /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
688 #[unstable(feature = "binary_heap_retain", issue = "71503")]
689 pub fn retain
<F
>(&mut self, f
: F
)
691 F
: FnMut(&T
) -> bool
,
698 impl<T
> BinaryHeap
<T
> {
699 /// Returns an iterator visiting all values in the underlying vector, in
707 /// use std::collections::BinaryHeap;
708 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
710 /// // Print 1, 2, 3, 4 in arbitrary order
711 /// for x in heap.iter() {
712 /// println!("{}", x);
715 #[stable(feature = "rust1", since = "1.0.0")]
716 pub fn iter(&self) -> Iter
<'_
, T
> {
717 Iter { iter: self.data.iter() }
720 /// Returns an iterator which retrieves elements in heap order.
721 /// This method consumes the original heap.
728 /// #![feature(binary_heap_into_iter_sorted)]
729 /// use std::collections::BinaryHeap;
730 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
732 /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]);
734 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
735 pub fn into_iter_sorted(self) -> IntoIterSorted
<T
> {
736 IntoIterSorted { inner: self }
739 /// Returns the greatest item in the binary heap, or `None` if it is empty.
746 /// use std::collections::BinaryHeap;
747 /// let mut heap = BinaryHeap::new();
748 /// assert_eq!(heap.peek(), None);
753 /// assert_eq!(heap.peek(), Some(&5));
757 /// # Time complexity
759 /// Cost is *O*(1) in the worst case.
760 #[stable(feature = "rust1", since = "1.0.0")]
761 pub fn peek(&self) -> Option
<&T
> {
765 /// Returns the number of elements the binary heap can hold without reallocating.
772 /// use std::collections::BinaryHeap;
773 /// let mut heap = BinaryHeap::with_capacity(100);
774 /// assert!(heap.capacity() >= 100);
777 #[stable(feature = "rust1", since = "1.0.0")]
778 pub fn capacity(&self) -> usize {
782 /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
783 /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
785 /// Note that the allocator may give the collection more space than it requests. Therefore
786 /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
787 /// insertions are expected.
791 /// Panics if the new capacity overflows `usize`.
798 /// use std::collections::BinaryHeap;
799 /// let mut heap = BinaryHeap::new();
800 /// heap.reserve_exact(100);
801 /// assert!(heap.capacity() >= 100);
805 /// [`reserve`]: BinaryHeap::reserve
806 #[stable(feature = "rust1", since = "1.0.0")]
807 pub fn reserve_exact(&mut self, additional
: usize) {
808 self.data
.reserve_exact(additional
);
811 /// Reserves capacity for at least `additional` more elements to be inserted in the
812 /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
816 /// Panics if the new capacity overflows `usize`.
823 /// use std::collections::BinaryHeap;
824 /// let mut heap = BinaryHeap::new();
825 /// heap.reserve(100);
826 /// assert!(heap.capacity() >= 100);
829 #[stable(feature = "rust1", since = "1.0.0")]
830 pub fn reserve(&mut self, additional
: usize) {
831 self.data
.reserve(additional
);
834 /// Discards as much additional capacity as possible.
841 /// use std::collections::BinaryHeap;
842 /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
844 /// assert!(heap.capacity() >= 100);
845 /// heap.shrink_to_fit();
846 /// assert!(heap.capacity() == 0);
848 #[stable(feature = "rust1", since = "1.0.0")]
849 pub fn shrink_to_fit(&mut self) {
850 self.data
.shrink_to_fit();
853 /// Discards capacity with a lower bound.
855 /// The capacity will remain at least as large as both the length
856 /// and the supplied value.
858 /// Panics if the current capacity is smaller than the supplied
859 /// minimum capacity.
864 /// #![feature(shrink_to)]
865 /// use std::collections::BinaryHeap;
866 /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
868 /// assert!(heap.capacity() >= 100);
869 /// heap.shrink_to(10);
870 /// assert!(heap.capacity() >= 10);
873 #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
874 pub fn shrink_to(&mut self, min_capacity
: usize) {
875 self.data
.shrink_to(min_capacity
)
878 /// Consumes the `BinaryHeap` and returns the underlying vector
879 /// in arbitrary order.
886 /// use std::collections::BinaryHeap;
887 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
888 /// let vec = heap.into_vec();
890 /// // Will print in some order
892 /// println!("{}", x);
895 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
896 pub fn into_vec(self) -> Vec
<T
> {
900 /// Returns the length of the binary heap.
907 /// use std::collections::BinaryHeap;
908 /// let heap = BinaryHeap::from(vec![1, 3]);
910 /// assert_eq!(heap.len(), 2);
912 #[stable(feature = "rust1", since = "1.0.0")]
913 pub fn len(&self) -> usize {
917 /// Checks if the binary heap is empty.
924 /// use std::collections::BinaryHeap;
925 /// let mut heap = BinaryHeap::new();
927 /// assert!(heap.is_empty());
933 /// assert!(!heap.is_empty());
935 #[stable(feature = "rust1", since = "1.0.0")]
936 pub fn is_empty(&self) -> bool
{
940 /// Clears the binary heap, returning an iterator over the removed elements.
942 /// The elements are removed in arbitrary order.
949 /// use std::collections::BinaryHeap;
950 /// let mut heap = BinaryHeap::from(vec![1, 3]);
952 /// assert!(!heap.is_empty());
954 /// for x in heap.drain() {
955 /// println!("{}", x);
958 /// assert!(heap.is_empty());
961 #[stable(feature = "drain", since = "1.6.0")]
962 pub fn drain(&mut self) -> Drain
<'_
, T
> {
963 Drain { iter: self.data.drain(..) }
966 /// Drops all items from the binary heap.
973 /// use std::collections::BinaryHeap;
974 /// let mut heap = BinaryHeap::from(vec![1, 3]);
976 /// assert!(!heap.is_empty());
980 /// assert!(heap.is_empty());
982 #[stable(feature = "rust1", since = "1.0.0")]
983 pub fn clear(&mut self) {
/// Hole represents a hole in a slice i.e., an index without valid value
/// (because it was moved from or duplicated).
/// In drop, `Hole` will restore the slice by filling the hole
/// position with the value that was originally removed.
struct Hole<'a, T: 'a> {
    data: &'a mut [T],
    // The element read out of `data[pos]`; logically owned by the hole and
    // written back on drop.
    elt: ManuallyDrop<T>,
    pos: usize,
}

impl<'a, T> Hole<'a, T> {
    /// Create a new `Hole` at index `pos`.
    ///
    /// Unsafe because pos must be within the data slice.
    #[inline]
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // SAFE: pos should be inside the slice
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        Hole { data, elt: ManuallyDrop::new(elt), pos }
    }

    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }

    /// Returns a reference to the element removed.
    #[inline]
    fn element(&self) -> &T {
        &self.elt
    }

    /// Returns a reference to the element at `index`.
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe { self.data.get_unchecked(index) }
    }

    /// Move hole to new location
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        // SAFETY: caller guarantees both indices are in bounds and distinct,
        // so the one-element copy cannot overlap.
        unsafe {
            let index_ptr: *const _ = self.data.get_unchecked(index);
            let hole_ptr = self.data.get_unchecked_mut(self.pos);
            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        }
        self.pos = index;
    }
}

impl<T> Drop for Hole<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // fill the hole again
        unsafe {
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
    }
}
1058 /// An iterator over the elements of a `BinaryHeap`.
1060 /// This `struct` is created by [`BinaryHeap::iter()`]. See its
1061 /// documentation for more.
1063 /// [`iter`]: BinaryHeap::iter
1064 #[stable(feature = "rust1", since = "1.0.0")]
1065 pub struct Iter
<'a
, T
: 'a
> {
1066 iter
: slice
::Iter
<'a
, T
>,
1069 #[stable(feature = "collection_debug", since = "1.17.0")]
1070 impl<T
: fmt
::Debug
> fmt
::Debug
for Iter
<'_
, T
> {
1071 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
1072 f
.debug_tuple("Iter").field(&self.iter
.as_slice()).finish()
1076 // FIXME(#26925) Remove in favor of `#[derive(Clone)]`
1077 #[stable(feature = "rust1", since = "1.0.0")]
1078 impl<T
> Clone
for Iter
<'_
, T
> {
1079 fn clone(&self) -> Self {
1080 Iter { iter: self.iter.clone() }
1084 #[stable(feature = "rust1", since = "1.0.0")]
1085 impl<'a
, T
> Iterator
for Iter
<'a
, T
> {
1089 fn next(&mut self) -> Option
<&'a T
> {
1094 fn size_hint(&self) -> (usize, Option
<usize>) {
1095 self.iter
.size_hint()
1099 fn last(self) -> Option
<&'a T
> {
1104 #[stable(feature = "rust1", since = "1.0.0")]
1105 impl<'a
, T
> DoubleEndedIterator
for Iter
<'a
, T
> {
1107 fn next_back(&mut self) -> Option
<&'a T
> {
1108 self.iter
.next_back()
1112 #[stable(feature = "rust1", since = "1.0.0")]
1113 impl<T
> ExactSizeIterator
for Iter
<'_
, T
> {
1114 fn is_empty(&self) -> bool
{
1115 self.iter
.is_empty()
1119 #[stable(feature = "fused", since = "1.26.0")]
1120 impl<T
> FusedIterator
for Iter
<'_
, T
> {}
1122 /// An owning iterator over the elements of a `BinaryHeap`.
1124 /// This `struct` is created by [`BinaryHeap::into_iter()`]
1125 /// (provided by the `IntoIterator` trait). See its documentation for more.
1127 /// [`into_iter`]: BinaryHeap::into_iter
1128 #[stable(feature = "rust1", since = "1.0.0")]
1130 pub struct IntoIter
<T
> {
1131 iter
: vec
::IntoIter
<T
>,
1134 #[stable(feature = "collection_debug", since = "1.17.0")]
1135 impl<T
: fmt
::Debug
> fmt
::Debug
for IntoIter
<T
> {
1136 fn fmt(&self, f
: &mut fmt
::Formatter
<'_
>) -> fmt
::Result
{
1137 f
.debug_tuple("IntoIter").field(&self.iter
.as_slice()).finish()
1141 #[stable(feature = "rust1", since = "1.0.0")]
1142 impl<T
> Iterator
for IntoIter
<T
> {
1146 fn next(&mut self) -> Option
<T
> {
1151 fn size_hint(&self) -> (usize, Option
<usize>) {
1152 self.iter
.size_hint()
1156 #[stable(feature = "rust1", since = "1.0.0")]
1157 impl<T
> DoubleEndedIterator
for IntoIter
<T
> {
1159 fn next_back(&mut self) -> Option
<T
> {
1160 self.iter
.next_back()
1164 #[stable(feature = "rust1", since = "1.0.0")]
1165 impl<T
> ExactSizeIterator
for IntoIter
<T
> {
1166 fn is_empty(&self) -> bool
{
1167 self.iter
.is_empty()
1171 #[stable(feature = "fused", since = "1.26.0")]
1172 impl<T
> FusedIterator
for IntoIter
<T
> {}
1174 #[unstable(issue = "none", feature = "inplace_iteration")]
1175 unsafe impl<T
> SourceIter
for IntoIter
<T
> {
1176 type Source
= IntoIter
<T
>;
1179 unsafe fn as_inner(&mut self) -> &mut Self::Source
{
1184 #[unstable(issue = "none", feature = "inplace_iteration")]
1185 unsafe impl<I
> InPlaceIterable
for IntoIter
<I
> {}
1187 impl<I
> AsIntoIter
for IntoIter
<I
> {
1190 fn as_into_iter(&mut self) -> &mut vec
::IntoIter
<Self::Item
> {
1195 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1196 #[derive(Clone, Debug)]
1197 pub struct IntoIterSorted
<T
> {
1198 inner
: BinaryHeap
<T
>,
1201 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1202 impl<T
: Ord
> Iterator
for IntoIterSorted
<T
> {
1206 fn next(&mut self) -> Option
<T
> {
1211 fn size_hint(&self) -> (usize, Option
<usize>) {
1212 let exact
= self.inner
.len();
1213 (exact
, Some(exact
))
1217 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1218 impl<T
: Ord
> ExactSizeIterator
for IntoIterSorted
<T
> {}
1220 #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1221 impl<T
: Ord
> FusedIterator
for IntoIterSorted
<T
> {}
1223 #[unstable(feature = "trusted_len", issue = "37572")]
1224 unsafe impl<T
: Ord
> TrustedLen
for IntoIterSorted
<T
> {}
1226 /// A draining iterator over the elements of a `BinaryHeap`.
1228 /// This `struct` is created by [`BinaryHeap::drain()`]. See its
1229 /// documentation for more.
1231 /// [`drain`]: BinaryHeap::drain
1232 #[stable(feature = "drain", since = "1.6.0")]
1234 pub struct Drain
<'a
, T
: 'a
> {
1235 iter
: vec
::Drain
<'a
, T
>,
1238 #[stable(feature = "drain", since = "1.6.0")]
1239 impl<T
> Iterator
for Drain
<'_
, T
> {
1243 fn next(&mut self) -> Option
<T
> {
1248 fn size_hint(&self) -> (usize, Option
<usize>) {
1249 self.iter
.size_hint()
1253 #[stable(feature = "drain", since = "1.6.0")]
1254 impl<T
> DoubleEndedIterator
for Drain
<'_
, T
> {
1256 fn next_back(&mut self) -> Option
<T
> {
1257 self.iter
.next_back()
1261 #[stable(feature = "drain", since = "1.6.0")]
1262 impl<T
> ExactSizeIterator
for Drain
<'_
, T
> {
1263 fn is_empty(&self) -> bool
{
1264 self.iter
.is_empty()
1268 #[stable(feature = "fused", since = "1.26.0")]
1269 impl<T
> FusedIterator
for Drain
<'_
, T
> {}
1271 /// A draining iterator over the elements of a `BinaryHeap`.
1273 /// This `struct` is created by [`BinaryHeap::drain_sorted()`]. See its
1274 /// documentation for more.
1276 /// [`drain_sorted`]: BinaryHeap::drain_sorted
1277 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
1279 pub struct DrainSorted
<'a
, T
: Ord
> {
1280 inner
: &'a
mut BinaryHeap
<T
>,
1283 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
1284 impl<'a
, T
: Ord
> Drop
for DrainSorted
<'a
, T
> {
1285 /// Removes heap elements in heap order.
1286 fn drop(&mut self) {
1287 struct DropGuard
<'r
, 'a
, T
: Ord
>(&'r
mut DrainSorted
<'a
, T
>);
1289 impl<'r
, 'a
, T
: Ord
> Drop
for DropGuard
<'r
, 'a
, T
> {
1290 fn drop(&mut self) {
1291 while self.0.inner
.pop().is_some() {}
1295 while let Some(item
) = self.inner
.pop() {
1296 let guard
= DropGuard(self);
1303 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
1304 impl<T
: Ord
> Iterator
for DrainSorted
<'_
, T
> {
1308 fn next(&mut self) -> Option
<T
> {
1313 fn size_hint(&self) -> (usize, Option
<usize>) {
1314 let exact
= self.inner
.len();
1315 (exact
, Some(exact
))
1319 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
1320 impl<T
: Ord
> ExactSizeIterator
for DrainSorted
<'_
, T
> {}
1322 #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
1323 impl<T
: Ord
> FusedIterator
for DrainSorted
<'_
, T
> {}
1325 #[unstable(feature = "trusted_len", issue = "37572")]
1326 unsafe impl<T
: Ord
> TrustedLen
for DrainSorted
<'_
, T
> {}
1328 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1329 impl<T
: Ord
> From
<Vec
<T
>> for BinaryHeap
<T
> {
1330 /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
1332 /// This conversion happens in-place, and has *O*(*n*) time complexity.
1333 fn from(vec
: Vec
<T
>) -> BinaryHeap
<T
> {
1334 let mut heap
= BinaryHeap { data: vec }
;
1340 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1341 impl<T
> From
<BinaryHeap
<T
>> for Vec
<T
> {
1342 /// Converts a `BinaryHeap<T>` into a `Vec<T>`.
1344 /// This conversion requires no data movement or allocation, and has
1345 /// constant time complexity.
1346 fn from(heap
: BinaryHeap
<T
>) -> Vec
<T
> {
1351 #[stable(feature = "rust1", since = "1.0.0")]
1352 impl<T
: Ord
> FromIterator
<T
> for BinaryHeap
<T
> {
1353 fn from_iter
<I
: IntoIterator
<Item
= T
>>(iter
: I
) -> BinaryHeap
<T
> {
1354 BinaryHeap
::from(iter
.into_iter().collect
::<Vec
<_
>>())
1358 #[stable(feature = "rust1", since = "1.0.0")]
1359 impl<T
> IntoIterator
for BinaryHeap
<T
> {
1361 type IntoIter
= IntoIter
<T
>;
1363 /// Creates a consuming iterator, that is, one that moves each value out of
1364 /// the binary heap in arbitrary order. The binary heap cannot be used
1365 /// after calling this.
1372 /// use std::collections::BinaryHeap;
1373 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
1375 /// // Print 1, 2, 3, 4 in arbitrary order
1376 /// for x in heap.into_iter() {
1377 /// // x has type i32, not &i32
1378 /// println!("{}", x);
1381 fn into_iter(self) -> IntoIter
<T
> {
1382 IntoIter { iter: self.data.into_iter() }
1386 #[stable(feature = "rust1", since = "1.0.0")]
1387 impl<'a
, T
> IntoIterator
for &'a BinaryHeap
<T
> {
1389 type IntoIter
= Iter
<'a
, T
>;
1391 fn into_iter(self) -> Iter
<'a
, T
> {
1396 #[stable(feature = "rust1", since = "1.0.0")]
1397 impl<T
: Ord
> Extend
<T
> for BinaryHeap
<T
> {
1399 fn extend
<I
: IntoIterator
<Item
= T
>>(&mut self, iter
: I
) {
1400 <Self as SpecExtend
<I
>>::spec_extend(self, iter
);
1404 fn extend_one(&mut self, item
: T
) {
1409 fn extend_reserve(&mut self, additional
: usize) {
1410 self.reserve(additional
);
1414 impl<T
: Ord
, I
: IntoIterator
<Item
= T
>> SpecExtend
<I
> for BinaryHeap
<T
> {
1415 default fn spec_extend(&mut self, iter
: I
) {
1416 self.extend_desugared(iter
.into_iter());
1420 impl<T
: Ord
> SpecExtend
<BinaryHeap
<T
>> for BinaryHeap
<T
> {
1421 fn spec_extend(&mut self, ref mut other
: BinaryHeap
<T
>) {
1426 impl<T
: Ord
> BinaryHeap
<T
> {
1427 fn extend_desugared
<I
: IntoIterator
<Item
= T
>>(&mut self, iter
: I
) {
1428 let iterator
= iter
.into_iter();
1429 let (lower
, _
) = iterator
.size_hint();
1431 self.reserve(lower
);
1433 iterator
.for_each(move |elem
| self.push(elem
));
1437 #[stable(feature = "extend_ref", since = "1.2.0")]
1438 impl<'a
, T
: 'a
+ Ord
+ Copy
> Extend
<&'a T
> for BinaryHeap
<T
> {
1439 fn extend
<I
: IntoIterator
<Item
= &'a T
>>(&mut self, iter
: I
) {
1440 self.extend(iter
.into_iter().cloned());
1444 fn extend_one(&mut self, &item
: &'a T
) {
1449 fn extend_reserve(&mut self, additional
: usize) {
1450 self.reserve(additional
);