1 // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! A priority queue implemented with a binary heap.
13 //! Insertion and popping the largest element have `O(log n)` time complexity.
14 //! Checking the largest element is `O(1)`. Converting a vector to a binary heap
15 //! can be done in-place, and has `O(n)` complexity. A binary heap can also be
16 //! converted to a sorted vector in-place, allowing it to be used for an `O(n
17 //! log n)` in-place heapsort.
21 //! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
22 //! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
23 //! It shows how to use `BinaryHeap` with custom types.
25 //! [dijkstra]: http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
26 //! [sssp]: http://en.wikipedia.org/wiki/Shortest_path_problem
27 //! [dir_graph]: http://en.wikipedia.org/wiki/Directed_graph
30 //! use std::cmp::Ordering;
31 //! use std::collections::BinaryHeap;
34 //! #[derive(Copy, Clone, Eq, PartialEq)]
40 //! // The priority queue depends on `Ord`.
41 //! // Explicitly implement the trait so the queue becomes a min-heap
42 //! // instead of a max-heap.
43 //! impl Ord for State {
44 //! fn cmp(&self, other: &State) -> Ordering {
45 //!         // Notice that we flip the ordering here
46 //! other.cost.cmp(&self.cost)
50 //! // `PartialOrd` needs to be implemented as well.
51 //! impl PartialOrd for State {
52 //! fn partial_cmp(&self, other: &State) -> Option<Ordering> {
53 //! Some(self.cmp(other))
57 //! // Each node is represented as a `usize`, for a shorter implementation.
63 //! // Dijkstra's shortest path algorithm.
65 //! // Start at `start` and use `dist` to track the current shortest distance
66 //! // to each node. This implementation isn't memory-efficient as it may leave duplicate
67 //! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
68 //! // for a simpler implementation.
69 //! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
70 //! // dist[node] = current shortest distance from `start` to `node`
71 //! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
73 //! let mut heap = BinaryHeap::new();
75 //! // We're at `start`, with a zero cost
77 //! heap.push(State { cost: 0, position: start });
79 //! // Examine the frontier with lower cost nodes first (min-heap)
80 //! while let Some(State { cost, position }) = heap.pop() {
81 //! // Alternatively we could have continued to find all shortest paths
82 //! if position == goal { return Some(cost); }
84 //! // Important as we may have already found a better way
85 //! if cost > dist[position] { continue; }
87 //! // For each node we can reach, see if we can find a way with
88 //! // a lower cost going through this node
89 //! for edge in &adj_list[position] {
90 //! let next = State { cost: cost + edge.cost, position: edge.node };
92 //! // If so, add it to the frontier and continue
93 //! if next.cost < dist[next.position] {
95 //! // Relaxation, we have now found a better way
96 //! dist[next.position] = next.cost;
101 //! // Goal not reachable
106 //! // This is the directed graph we're going to use.
107 //! // The node numbers correspond to the different states,
108 //! // and the edge weights symbolize the cost of moving
109 //! // from one node to another.
110 //! // Note that the edges are one-way.
113 //! // +-----------------+
116 //! // 0 -----> 1 -----> 3 ---> 4
120 //! // +------> 2 -------+ |
122 //! // +---------------+
124 //! // The graph is represented as an adjacency list where each index,
125 //! // corresponding to a node value, has a list of outgoing edges.
126 //! // Chosen for its efficiency.
127 //! let graph = vec![
129 //! vec![Edge { node: 2, cost: 10 },
130 //! Edge { node: 1, cost: 1 }],
132 //! vec![Edge { node: 3, cost: 2 }],
134 //! vec![Edge { node: 1, cost: 1 },
135 //! Edge { node: 3, cost: 3 },
136 //! Edge { node: 4, cost: 1 }],
138 //! vec![Edge { node: 0, cost: 7 },
139 //! Edge { node: 4, cost: 2 }],
143 //! assert_eq!(shortest_path(&graph, 0, 1), Some(1));
144 //! assert_eq!(shortest_path(&graph, 0, 3), Some(3));
145 //! assert_eq!(shortest_path(&graph, 3, 0), Some(7));
146 //! assert_eq!(shortest_path(&graph, 0, 4), Some(5));
147 //! assert_eq!(shortest_path(&graph, 4, 0), None);
151 #![allow(missing_docs)]
152 #![stable(feature = "rust1", since = "1.0.0")]
154 use core
::ops
::{Deref, DerefMut, Place, Placer, InPlace}
;
155 use core
::iter
::{FromIterator, FusedIterator}
;
156 use core
::mem
::{swap, size_of}
;
161 use vec
::{self, Vec}
;
163 use super::SpecExtend
;
165 /// A priority queue implemented with a binary heap.
167 /// This will be a max-heap.
169 /// It is a logic error for an item to be modified in such a way that the
170 /// item's ordering relative to any other item, as determined by the `Ord`
171 /// trait, changes while it is in the heap. This is normally only possible
172 /// through `Cell`, `RefCell`, global state, I/O, or unsafe code.
177 /// use std::collections::BinaryHeap;
179 /// // Type inference lets us omit an explicit type signature (which
180 /// // would be `BinaryHeap<i32>` in this example).
181 /// let mut heap = BinaryHeap::new();
183 /// // We can use peek to look at the next item in the heap. In this case,
184 /// // there's no items in there yet so we get None.
185 /// assert_eq!(heap.peek(), None);
187 /// // Let's add some scores...
192 /// // Now peek shows the most important item in the heap.
193 /// assert_eq!(heap.peek(), Some(&5));
195 /// // We can check the length of a heap.
196 /// assert_eq!(heap.len(), 3);
198 /// // We can iterate over the items in the heap, although they are returned in
199 /// // a random order.
201 /// println!("{}", x);
204 /// // If we instead pop these scores, they should come back in order.
205 /// assert_eq!(heap.pop(), Some(5));
206 /// assert_eq!(heap.pop(), Some(2));
207 /// assert_eq!(heap.pop(), Some(1));
208 /// assert_eq!(heap.pop(), None);
210 /// // We can clear the heap of any remaining items.
213 /// // The heap should now be empty.
214 /// assert!(heap.is_empty())
216 #[stable(feature = "rust1", since = "1.0.0")]
pub struct BinaryHeap<T> {
    // Backing buffer, maintained in max-heap order: `data[0]` is the greatest
    // element, and every parent is >= its children at `2*i + 1` / `2*i + 2`.
    data: Vec<T>,
}
221 /// A container object that represents the result of the [`peek_mut()`] method
222 /// on `BinaryHeap`. See its documentation for details.
224 /// [`peek_mut()`]: struct.BinaryHeap.html#method.peek_mut
225 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
226 pub struct PeekMut
<'a
, T
: 'a
+ Ord
> {
227 heap
: &'a
mut BinaryHeap
<T
>,
231 #[stable(feature = "collection_debug", since = "1.17.0")]
232 impl<'a
, T
: Ord
+ fmt
::Debug
> fmt
::Debug
for PeekMut
<'a
, T
> {
233 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
234 f
.debug_tuple("PeekMut")
235 .field(&self.heap
.data
[0])
240 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
241 impl<'a
, T
: Ord
> Drop
for PeekMut
<'a
, T
> {
244 self.heap
.sift_down(0);
249 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
250 impl<'a
, T
: Ord
> Deref
for PeekMut
<'a
, T
> {
252 fn deref(&self) -> &T
{
257 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
258 impl<'a
, T
: Ord
> DerefMut
for PeekMut
<'a
, T
> {
259 fn deref_mut(&mut self) -> &mut T
{
260 &mut self.heap
.data
[0]
264 impl<'a
, T
: Ord
> PeekMut
<'a
, T
> {
265 /// Removes the peeked value from the heap and returns it.
266 #[unstable(feature = "binary_heap_peek_mut_pop", issue = "38863")]
267 pub fn pop(mut this
: PeekMut
<'a
, T
>) -> T
{
268 let value
= this
.heap
.pop().unwrap();
274 #[stable(feature = "rust1", since = "1.0.0")]
275 impl<T
: Clone
> Clone
for BinaryHeap
<T
> {
276 fn clone(&self) -> Self {
277 BinaryHeap { data: self.data.clone() }
280 fn clone_from(&mut self, source
: &Self) {
281 self.data
.clone_from(&source
.data
);
285 #[stable(feature = "rust1", since = "1.0.0")]
286 impl<T
: Ord
> Default
for BinaryHeap
<T
> {
287 /// Creates an empty `BinaryHeap<T>`.
289 fn default() -> BinaryHeap
<T
> {
294 #[stable(feature = "binaryheap_debug", since = "1.4.0")]
295 impl<T
: fmt
::Debug
+ Ord
> fmt
::Debug
for BinaryHeap
<T
> {
296 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
297 f
.debug_list().entries(self.iter()).finish()
301 impl<T
: Ord
> BinaryHeap
<T
> {
302 /// Creates an empty `BinaryHeap` as a max-heap.
309 /// use std::collections::BinaryHeap;
310 /// let mut heap = BinaryHeap::new();
313 #[stable(feature = "rust1", since = "1.0.0")]
314 pub fn new() -> BinaryHeap
<T
> {
315 BinaryHeap { data: vec![] }
318 /// Creates an empty `BinaryHeap` with a specific capacity.
319 /// This preallocates enough memory for `capacity` elements,
320 /// so that the `BinaryHeap` does not have to be reallocated
321 /// until it contains at least that many values.
328 /// use std::collections::BinaryHeap;
329 /// let mut heap = BinaryHeap::with_capacity(10);
332 #[stable(feature = "rust1", since = "1.0.0")]
333 pub fn with_capacity(capacity
: usize) -> BinaryHeap
<T
> {
334 BinaryHeap { data: Vec::with_capacity(capacity) }
337 /// Returns an iterator visiting all values in the underlying vector, in
345 /// use std::collections::BinaryHeap;
346 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
348 /// // Print 1, 2, 3, 4 in arbitrary order
349 /// for x in heap.iter() {
350 /// println!("{}", x);
353 #[stable(feature = "rust1", since = "1.0.0")]
354 pub fn iter(&self) -> Iter
<T
> {
355 Iter { iter: self.data.iter() }
358 /// Returns the greatest item in the binary heap, or `None` if it is empty.
365 /// use std::collections::BinaryHeap;
366 /// let mut heap = BinaryHeap::new();
367 /// assert_eq!(heap.peek(), None);
372 /// assert_eq!(heap.peek(), Some(&5));
375 #[stable(feature = "rust1", since = "1.0.0")]
376 pub fn peek(&self) -> Option
<&T
> {
380 /// Returns a mutable reference to the greatest item in the binary heap, or
381 /// `None` if it is empty.
383 /// Note: If the `PeekMut` value is leaked, the heap may be in an
384 /// inconsistent state.
391 /// use std::collections::BinaryHeap;
392 /// let mut heap = BinaryHeap::new();
393 /// assert!(heap.peek_mut().is_none());
399 /// let mut val = heap.peek_mut().unwrap();
402 /// assert_eq!(heap.peek(), Some(&2));
404 #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
405 pub fn peek_mut(&mut self) -> Option
<PeekMut
<T
>> {
416 /// Returns the number of elements the binary heap can hold without reallocating.
423 /// use std::collections::BinaryHeap;
424 /// let mut heap = BinaryHeap::with_capacity(100);
425 /// assert!(heap.capacity() >= 100);
428 #[stable(feature = "rust1", since = "1.0.0")]
429 pub fn capacity(&self) -> usize {
433 /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
434 /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
436 /// Note that the allocator may give the collection more space than it requests. Therefore
437 /// capacity can not be relied upon to be precisely minimal. Prefer `reserve` if future
438 /// insertions are expected.
442 /// Panics if the new capacity overflows `usize`.
449 /// use std::collections::BinaryHeap;
450 /// let mut heap = BinaryHeap::new();
451 /// heap.reserve_exact(100);
452 /// assert!(heap.capacity() >= 100);
455 #[stable(feature = "rust1", since = "1.0.0")]
456 pub fn reserve_exact(&mut self, additional
: usize) {
457 self.data
.reserve_exact(additional
);
460 /// Reserves capacity for at least `additional` more elements to be inserted in the
461 /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
465 /// Panics if the new capacity overflows `usize`.
472 /// use std::collections::BinaryHeap;
473 /// let mut heap = BinaryHeap::new();
474 /// heap.reserve(100);
475 /// assert!(heap.capacity() >= 100);
478 #[stable(feature = "rust1", since = "1.0.0")]
479 pub fn reserve(&mut self, additional
: usize) {
480 self.data
.reserve(additional
);
483 /// Discards as much additional capacity as possible.
490 /// use std::collections::BinaryHeap;
491 /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
493 /// assert!(heap.capacity() >= 100);
494 /// heap.shrink_to_fit();
495 /// assert!(heap.capacity() == 0);
497 #[stable(feature = "rust1", since = "1.0.0")]
498 pub fn shrink_to_fit(&mut self) {
499 self.data
.shrink_to_fit();
502 /// Removes the greatest item from the binary heap and returns it, or `None` if it
510 /// use std::collections::BinaryHeap;
511 /// let mut heap = BinaryHeap::from(vec![1, 3]);
513 /// assert_eq!(heap.pop(), Some(3));
514 /// assert_eq!(heap.pop(), Some(1));
515 /// assert_eq!(heap.pop(), None);
517 #[stable(feature = "rust1", since = "1.0.0")]
518 pub fn pop(&mut self) -> Option
<T
> {
519 self.data
.pop().map(|mut item
| {
520 if !self.is_empty() {
521 swap(&mut item
, &mut self.data
[0]);
522 self.sift_down_to_bottom(0);
528 /// Pushes an item onto the binary heap.
535 /// use std::collections::BinaryHeap;
536 /// let mut heap = BinaryHeap::new();
541 /// assert_eq!(heap.len(), 3);
542 /// assert_eq!(heap.peek(), Some(&5));
544 #[stable(feature = "rust1", since = "1.0.0")]
545 pub fn push(&mut self, item
: T
) {
546 let old_len
= self.len();
547 self.data
.push(item
);
548 self.sift_up(0, old_len
);
551 /// Pushes an item onto the binary heap, then pops the greatest item off the queue in
552 /// an optimized fashion.
559 /// #![feature(binary_heap_extras)]
560 /// #![allow(deprecated)]
562 /// use std::collections::BinaryHeap;
563 /// let mut heap = BinaryHeap::new();
567 /// assert_eq!(heap.push_pop(3), 5);
568 /// assert_eq!(heap.push_pop(9), 9);
569 /// assert_eq!(heap.len(), 2);
570 /// assert_eq!(heap.peek(), Some(&3));
572 #[unstable(feature = "binary_heap_extras",
573 reason
= "needs to be audited",
575 #[rustc_deprecated(since = "1.13.0", reason = "use `peek_mut` instead")]
576 pub fn push_pop(&mut self, mut item
: T
) -> T
{
577 match self.data
.get_mut(0) {
581 swap(&mut item
, top
);
592 /// Pops the greatest item off the binary heap, then pushes an item onto the queue in
593 /// an optimized fashion. The push is done regardless of whether the binary heap
601 /// #![feature(binary_heap_extras)]
602 /// #![allow(deprecated)]
604 /// use std::collections::BinaryHeap;
605 /// let mut heap = BinaryHeap::new();
607 /// assert_eq!(heap.replace(1), None);
608 /// assert_eq!(heap.replace(3), Some(1));
609 /// assert_eq!(heap.len(), 1);
610 /// assert_eq!(heap.peek(), Some(&3));
612 #[unstable(feature = "binary_heap_extras",
613 reason
= "needs to be audited",
615 #[rustc_deprecated(since = "1.13.0", reason = "use `peek_mut` instead")]
616 pub fn replace(&mut self, mut item
: T
) -> Option
<T
> {
617 if !self.is_empty() {
618 swap(&mut item
, &mut self.data
[0]);
627 /// Consumes the `BinaryHeap` and returns the underlying vector
628 /// in arbitrary order.
635 /// use std::collections::BinaryHeap;
636 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
637 /// let vec = heap.into_vec();
639 /// // Will print in some order
641 /// println!("{}", x);
644 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
645 pub fn into_vec(self) -> Vec
<T
> {
649 /// Consumes the `BinaryHeap` and returns a vector in sorted
650 /// (ascending) order.
657 /// use std::collections::BinaryHeap;
659 /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
663 /// let vec = heap.into_sorted_vec();
664 /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
666 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
667 pub fn into_sorted_vec(mut self) -> Vec
<T
> {
668 let mut end
= self.len();
671 self.data
.swap(0, end
);
672 self.sift_down_range(0, end
);
677 // The implementations of sift_up and sift_down use unsafe blocks in
678 // order to move an element out of the vector (leaving behind a
679 // hole), shift along the others and move the removed element back into the
680 // vector at the final location of the hole.
681 // The `Hole` type is used to represent this, and make sure
682 // the hole is filled back at the end of its scope, even on panic.
683 // Using a hole reduces the constant factor compared to using swaps,
684 // which involves twice as many moves.
685 fn sift_up(&mut self, start
: usize, pos
: usize) -> usize {
687 // Take out the value at `pos` and create a hole.
688 let mut hole
= Hole
::new(&mut self.data
, pos
);
690 while hole
.pos() > start
{
691 let parent
= (hole
.pos() - 1) / 2;
692 if hole
.element() <= hole
.get(parent
) {
695 hole
.move_to(parent
);
701 /// Take an element at `pos` and move it down the heap,
702 /// while its children are larger.
703 fn sift_down_range(&mut self, pos
: usize, end
: usize) {
705 let mut hole
= Hole
::new(&mut self.data
, pos
);
706 let mut child
= 2 * pos
+ 1;
708 let right
= child
+ 1;
709 // compare with the greater of the two children
710 if right
< end
&& !(hole
.get(child
) > hole
.get(right
)) {
713 // if we are already in order, stop.
714 if hole
.element() >= hole
.get(child
) {
718 child
= 2 * hole
.pos() + 1;
723 fn sift_down(&mut self, pos
: usize) {
724 let len
= self.len();
725 self.sift_down_range(pos
, len
);
728 /// Take an element at `pos` and move it all the way down the heap,
729 /// then sift it up to its position.
731 /// Note: This is faster when the element is known to be large / should
732 /// be closer to the bottom.
733 fn sift_down_to_bottom(&mut self, mut pos
: usize) {
734 let end
= self.len();
737 let mut hole
= Hole
::new(&mut self.data
, pos
);
738 let mut child
= 2 * pos
+ 1;
740 let right
= child
+ 1;
741 // compare with the greater of the two children
742 if right
< end
&& !(hole
.get(child
) > hole
.get(right
)) {
746 child
= 2 * hole
.pos() + 1;
750 self.sift_up(start
, pos
);
753 /// Returns the length of the binary heap.
760 /// use std::collections::BinaryHeap;
761 /// let heap = BinaryHeap::from(vec![1, 3]);
763 /// assert_eq!(heap.len(), 2);
765 #[stable(feature = "rust1", since = "1.0.0")]
766 pub fn len(&self) -> usize {
770 /// Checks if the binary heap is empty.
777 /// use std::collections::BinaryHeap;
778 /// let mut heap = BinaryHeap::new();
780 /// assert!(heap.is_empty());
786 /// assert!(!heap.is_empty());
788 #[stable(feature = "rust1", since = "1.0.0")]
789 pub fn is_empty(&self) -> bool
{
793 /// Clears the binary heap, returning an iterator over the removed elements.
795 /// The elements are removed in arbitrary order.
802 /// use std::collections::BinaryHeap;
803 /// let mut heap = BinaryHeap::from(vec![1, 3]);
805 /// assert!(!heap.is_empty());
807 /// for x in heap.drain() {
808 /// println!("{}", x);
811 /// assert!(heap.is_empty());
814 #[stable(feature = "drain", since = "1.6.0")]
815 pub fn drain(&mut self) -> Drain
<T
> {
816 Drain { iter: self.data.drain(..) }
819 /// Drops all items from the binary heap.
826 /// use std::collections::BinaryHeap;
827 /// let mut heap = BinaryHeap::from(vec![1, 3]);
829 /// assert!(!heap.is_empty());
833 /// assert!(heap.is_empty());
835 #[stable(feature = "rust1", since = "1.0.0")]
836 pub fn clear(&mut self) {
840 fn rebuild(&mut self) {
841 let mut n
= self.len() / 2;
848 /// Moves all the elements of `other` into `self`, leaving `other` empty.
855 /// use std::collections::BinaryHeap;
857 /// let v = vec![-10, 1, 2, 3, 3];
858 /// let mut a = BinaryHeap::from(v);
860 /// let v = vec![-20, 5, 43];
861 /// let mut b = BinaryHeap::from(v);
863 /// a.append(&mut b);
865 /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
866 /// assert!(b.is_empty());
868 #[stable(feature = "binary_heap_append", since = "1.11.0")]
869 pub fn append(&mut self, other
: &mut Self) {
870 if self.len() < other
.len() {
874 if other
.is_empty() {
879 fn log2_fast(x
: usize) -> usize {
880 8 * size_of
::<usize>() - (x
.leading_zeros() as usize) - 1
883 // `rebuild` takes O(len1 + len2) operations
884 // and about 2 * (len1 + len2) comparisons in the worst case
885 // while `extend` takes O(len2 * log_2(len1)) operations
886 // and about 1 * len2 * log_2(len1) comparisons in the worst case,
887 // assuming len1 >= len2.
889 fn better_to_rebuild(len1
: usize, len2
: usize) -> bool
{
890 2 * (len1
+ len2
) < len2
* log2_fast(len1
)
893 if better_to_rebuild(self.len(), other
.len()) {
894 self.data
.append(&mut other
.data
);
897 self.extend(other
.drain());
902 /// Hole represents a hole in a slice i.e. an index without valid value
903 /// (because it was moved from or duplicated).
904 /// In drop, `Hole` will restore the slice by filling the hole
905 /// position with the value that was originally removed.
906 struct Hole
<'a
, T
: 'a
> {
908 /// `elt` is always `Some` from new until drop.
913 impl<'a
, T
> Hole
<'a
, T
> {
914 /// Create a new Hole at index `pos`.
916 /// Unsafe because pos must be within the data slice.
918 unsafe fn new(data
: &'a
mut [T
], pos
: usize) -> Self {
919 debug_assert
!(pos
< data
.len());
920 let elt
= ptr
::read(&data
[pos
]);
929 fn pos(&self) -> usize {
933 /// Return a reference to the element removed
935 fn element(&self) -> &T
{
936 self.elt
.as_ref().unwrap()
939 /// Return a reference to the element at `index`.
941 /// Unsafe because index must be within the data slice and not equal to pos.
943 unsafe fn get(&self, index
: usize) -> &T
{
944 debug_assert
!(index
!= self.pos
);
945 debug_assert
!(index
< self.data
.len());
946 self.data
.get_unchecked(index
)
949 /// Move hole to new location
951 /// Unsafe because index must be within the data slice and not equal to pos.
953 unsafe fn move_to(&mut self, index
: usize) {
954 debug_assert
!(index
!= self.pos
);
955 debug_assert
!(index
< self.data
.len());
956 let index_ptr
: *const _
= self.data
.get_unchecked(index
);
957 let hole_ptr
= self.data
.get_unchecked_mut(self.pos
);
958 ptr
::copy_nonoverlapping(index_ptr
, hole_ptr
, 1);
963 impl<'a
, T
> Drop
for Hole
<'a
, T
> {
966 // fill the hole again
969 ptr
::write(self.data
.get_unchecked_mut(pos
), self.elt
.take().unwrap());
974 /// `BinaryHeap` iterator.
975 #[stable(feature = "rust1", since = "1.0.0")]
976 pub struct Iter
<'a
, T
: 'a
> {
977 iter
: slice
::Iter
<'a
, T
>,
980 #[stable(feature = "collection_debug", since = "1.17.0")]
981 impl<'a
, T
: 'a
+ fmt
::Debug
> fmt
::Debug
for Iter
<'a
, T
> {
982 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
983 f
.debug_tuple("Iter")
984 .field(&self.iter
.as_slice())
989 // FIXME(#19839) Remove in favor of `#[derive(Clone)]`
990 #[stable(feature = "rust1", since = "1.0.0")]
991 impl<'a
, T
> Clone
for Iter
<'a
, T
> {
992 fn clone(&self) -> Iter
<'a
, T
> {
993 Iter { iter: self.iter.clone() }
997 #[stable(feature = "rust1", since = "1.0.0")]
998 impl<'a
, T
> Iterator
for Iter
<'a
, T
> {
1002 fn next(&mut self) -> Option
<&'a T
> {
1007 fn size_hint(&self) -> (usize, Option
<usize>) {
1008 self.iter
.size_hint()
1012 #[stable(feature = "rust1", since = "1.0.0")]
1013 impl<'a
, T
> DoubleEndedIterator
for Iter
<'a
, T
> {
1015 fn next_back(&mut self) -> Option
<&'a T
> {
1016 self.iter
.next_back()
1020 #[stable(feature = "rust1", since = "1.0.0")]
1021 impl<'a
, T
> ExactSizeIterator
for Iter
<'a
, T
> {
1022 fn is_empty(&self) -> bool
{
1023 self.iter
.is_empty()
1027 #[unstable(feature = "fused", issue = "35602")]
1028 impl<'a
, T
> FusedIterator
for Iter
<'a
, T
> {}
1030 /// An iterator that moves out of a `BinaryHeap`.
1031 #[stable(feature = "rust1", since = "1.0.0")]
1033 pub struct IntoIter
<T
> {
1034 iter
: vec
::IntoIter
<T
>,
1037 #[stable(feature = "collection_debug", since = "1.17.0")]
1038 impl<T
: fmt
::Debug
> fmt
::Debug
for IntoIter
<T
> {
1039 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
1040 f
.debug_tuple("IntoIter")
1041 .field(&self.iter
.as_slice())
1046 #[stable(feature = "rust1", since = "1.0.0")]
1047 impl<T
> Iterator
for IntoIter
<T
> {
1051 fn next(&mut self) -> Option
<T
> {
1056 fn size_hint(&self) -> (usize, Option
<usize>) {
1057 self.iter
.size_hint()
1061 #[stable(feature = "rust1", since = "1.0.0")]
1062 impl<T
> DoubleEndedIterator
for IntoIter
<T
> {
1064 fn next_back(&mut self) -> Option
<T
> {
1065 self.iter
.next_back()
1069 #[stable(feature = "rust1", since = "1.0.0")]
1070 impl<T
> ExactSizeIterator
for IntoIter
<T
> {
1071 fn is_empty(&self) -> bool
{
1072 self.iter
.is_empty()
1076 #[unstable(feature = "fused", issue = "35602")]
1077 impl<T
> FusedIterator
for IntoIter
<T
> {}
1079 /// An iterator that drains a `BinaryHeap`.
1080 #[stable(feature = "drain", since = "1.6.0")]
1082 pub struct Drain
<'a
, T
: 'a
> {
1083 iter
: vec
::Drain
<'a
, T
>,
1086 #[stable(feature = "drain", since = "1.6.0")]
1087 impl<'a
, T
: 'a
> Iterator
for Drain
<'a
, T
> {
1091 fn next(&mut self) -> Option
<T
> {
1096 fn size_hint(&self) -> (usize, Option
<usize>) {
1097 self.iter
.size_hint()
1101 #[stable(feature = "drain", since = "1.6.0")]
1102 impl<'a
, T
: 'a
> DoubleEndedIterator
for Drain
<'a
, T
> {
1104 fn next_back(&mut self) -> Option
<T
> {
1105 self.iter
.next_back()
1109 #[stable(feature = "drain", since = "1.6.0")]
1110 impl<'a
, T
: 'a
> ExactSizeIterator
for Drain
<'a
, T
> {
1111 fn is_empty(&self) -> bool
{
1112 self.iter
.is_empty()
1116 #[unstable(feature = "fused", issue = "35602")]
1117 impl<'a
, T
: 'a
> FusedIterator
for Drain
<'a
, T
> {}
1119 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1120 impl<T
: Ord
> From
<Vec
<T
>> for BinaryHeap
<T
> {
1121 fn from(vec
: Vec
<T
>) -> BinaryHeap
<T
> {
1122 let mut heap
= BinaryHeap { data: vec }
;
1128 #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1129 impl<T
> From
<BinaryHeap
<T
>> for Vec
<T
> {
1130 fn from(heap
: BinaryHeap
<T
>) -> Vec
<T
> {
1135 #[stable(feature = "rust1", since = "1.0.0")]
1136 impl<T
: Ord
> FromIterator
<T
> for BinaryHeap
<T
> {
1137 fn from_iter
<I
: IntoIterator
<Item
= T
>>(iter
: I
) -> BinaryHeap
<T
> {
1138 BinaryHeap
::from(iter
.into_iter().collect
::<Vec
<_
>>())
1142 #[stable(feature = "rust1", since = "1.0.0")]
1143 impl<T
: Ord
> IntoIterator
for BinaryHeap
<T
> {
1145 type IntoIter
= IntoIter
<T
>;
1147 /// Creates a consuming iterator, that is, one that moves each value out of
1148 /// the binary heap in arbitrary order. The binary heap cannot be used
1149 /// after calling this.
1156 /// use std::collections::BinaryHeap;
1157 /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
1159 /// // Print 1, 2, 3, 4 in arbitrary order
1160 /// for x in heap.into_iter() {
1161 /// // x has type i32, not &i32
1162 /// println!("{}", x);
1165 fn into_iter(self) -> IntoIter
<T
> {
1166 IntoIter { iter: self.data.into_iter() }
1170 #[stable(feature = "rust1", since = "1.0.0")]
1171 impl<'a
, T
> IntoIterator
for &'a BinaryHeap
<T
>
1175 type IntoIter
= Iter
<'a
, T
>;
1177 fn into_iter(self) -> Iter
<'a
, T
> {
1182 #[stable(feature = "rust1", since = "1.0.0")]
1183 impl<T
: Ord
> Extend
<T
> for BinaryHeap
<T
> {
1185 fn extend
<I
: IntoIterator
<Item
= T
>>(&mut self, iter
: I
) {
1186 <Self as SpecExtend
<I
>>::spec_extend(self, iter
);
1190 impl<T
: Ord
, I
: IntoIterator
<Item
= T
>> SpecExtend
<I
> for BinaryHeap
<T
> {
1191 default fn spec_extend(&mut self, iter
: I
) {
1192 self.extend_desugared(iter
.into_iter());
1196 impl<T
: Ord
> SpecExtend
<BinaryHeap
<T
>> for BinaryHeap
<T
> {
1197 fn spec_extend(&mut self, ref mut other
: BinaryHeap
<T
>) {
1202 impl<T
: Ord
> BinaryHeap
<T
> {
1203 fn extend_desugared
<I
: IntoIterator
<Item
= T
>>(&mut self, iter
: I
) {
1204 let iterator
= iter
.into_iter();
1205 let (lower
, _
) = iterator
.size_hint();
1207 self.reserve(lower
);
1209 for elem
in iterator
{
1215 #[stable(feature = "extend_ref", since = "1.2.0")]
1216 impl<'a
, T
: 'a
+ Ord
+ Copy
> Extend
<&'a T
> for BinaryHeap
<T
> {
1217 fn extend
<I
: IntoIterator
<Item
= &'a T
>>(&mut self, iter
: I
) {
1218 self.extend(iter
.into_iter().cloned());
1222 #[unstable(feature = "collection_placement",
1223 reason
= "placement protocol is subject to change",
1225 pub struct BinaryHeapPlace
<'a
, T
: 'a
>
1226 where T
: Clone
+ Ord
{
1227 heap
: *mut BinaryHeap
<T
>,
1228 place
: vec
::PlaceBack
<'a
, T
>,
1231 #[unstable(feature = "collection_placement",
1232 reason
= "placement protocol is subject to change",
1234 impl<'a
, T
: Clone
+ Ord
+ fmt
::Debug
> fmt
::Debug
for BinaryHeapPlace
<'a
, T
> {
1235 fn fmt(&self, f
: &mut fmt
::Formatter
) -> fmt
::Result
{
1236 f
.debug_tuple("BinaryHeapPlace")
1242 #[unstable(feature = "collection_placement",
1243 reason
= "placement protocol is subject to change",
1245 impl<'a
, T
: 'a
> Placer
<T
> for &'a
mut BinaryHeap
<T
>
1246 where T
: Clone
+ Ord
{
1247 type Place
= BinaryHeapPlace
<'a
, T
>;
1249 fn make_place(self) -> Self::Place
{
1250 let ptr
= self as *mut BinaryHeap
<T
>;
1251 let place
= Placer
::make_place(self.data
.place_back());
1259 #[unstable(feature = "collection_placement",
1260 reason
= "placement protocol is subject to change",
1262 impl<'a
, T
> Place
<T
> for BinaryHeapPlace
<'a
, T
>
1263 where T
: Clone
+ Ord
{
1264 fn pointer(&mut self) -> *mut T
{
1265 self.place
.pointer()
1269 #[unstable(feature = "collection_placement",
1270 reason
= "placement protocol is subject to change",
1272 impl<'a
, T
> InPlace
<T
> for BinaryHeapPlace
<'a
, T
>
1273 where T
: Clone
+ Ord
{
1276 unsafe fn finalize(self) -> &'a T
{
1277 self.place
.finalize();
1279 let heap
: &mut BinaryHeap
<T
> = &mut *self.heap
;
1280 let len
= heap
.len();
1281 let i
= heap
.sift_up(0, len
- 1);
1282 heap
.data
.get_unchecked(i
)