binary_heap_plus/binary_heap.rs
//! A priority queue implemented with a binary heap.
//!
//! Note: This version is forked from the Rust standard library implementation,
//! which only supports a max-heap; this fork additionally supports min-heaps
//! and heaps ordered by custom comparators.
//!
//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
//! Checking the largest element is *O*(1). Converting a vector to a binary heap
//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* * log(*n*))
//! in-place heapsort.
//!
//! # Examples
//!
//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
//! It shows how to use [`BinaryHeap`] with custom types.
//!
//! [dijkstra]: https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
//! [sssp]: https://en.wikipedia.org/wiki/Shortest_path_problem
//! [dir_graph]: https://en.wikipedia.org/wiki/Directed_graph
//!
//! ```
//! use std::cmp::Ordering;
//! use binary_heap_plus::BinaryHeap;
//!
//! #[derive(Copy, Clone, Eq, PartialEq)]
//! struct State {
//!     cost: usize,
//!     position: usize,
//! }
//!
//! // The priority queue depends on `Ord`.
//! // Explicitly implement the trait so the queue becomes a min-heap
//! // instead of a max-heap.
//! impl Ord for State {
//!     fn cmp(&self, other: &Self) -> Ordering {
//!         // Notice that we flip the ordering on costs.
//!         // In case of a tie we compare positions - this step is necessary
//!         // to make implementations of `PartialEq` and `Ord` consistent.
//!         other.cost.cmp(&self.cost)
//!             .then_with(|| self.position.cmp(&other.position))
//!     }
//! }
//!
//! // `PartialOrd` needs to be implemented as well.
//! impl PartialOrd for State {
//!     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
//!         Some(self.cmp(other))
//!     }
//! }
//!
//! // Each node is represented as a `usize`, for a shorter implementation.
//! struct Edge {
//!     node: usize,
//!     cost: usize,
//! }
//!
//! // Dijkstra's shortest path algorithm.
//!
//! // Start at `start` and use `dist` to track the current shortest distance
//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
//! // for a simpler implementation.
//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
//!     // dist[node] = current shortest distance from `start` to `node`
//!     let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
//!
//!     let mut heap = BinaryHeap::new();
//!
//!     // We're at `start`, with a zero cost
//!     dist[start] = 0;
//!     heap.push(State { cost: 0, position: start });
//!
//!     // Examine the frontier with lower cost nodes first (min-heap)
//!     while let Some(State { cost, position }) = heap.pop() {
//!         // Alternatively we could have continued to find all shortest paths
//!         if position == goal { return Some(cost); }
//!
//!         // Important as we may have already found a better way
//!         if cost > dist[position] { continue; }
//!
//!         // For each node we can reach, see if we can find a way with
//!         // a lower cost going through this node
//!         for edge in &adj_list[position] {
//!             let next = State { cost: cost + edge.cost, position: edge.node };
//!
//!             // If so, add it to the frontier and continue
//!             if next.cost < dist[next.position] {
//!                 heap.push(next);
//!                 // Relaxation, we have now found a better way
//!                 dist[next.position] = next.cost;
//!             }
//!         }
//!     }
//!
//!     // Goal not reachable
//!     None
//! }
//!
//! fn main() {
//!     // This is the directed graph we're going to use.
//!     // The node numbers correspond to the different states,
//!     // and the edge weights symbolize the cost of moving
//!     // from one node to another.
//!     // Note that the edges are one-way.
//!     //
//!     //                  7
//!     //          +-----------------+
//!     //          |                 |
//!     //          v   1        2    |  2
//!     //          0 -----> 1 -----> 3 ---> 4
//!     //          |        ^        ^      ^
//!     //          |        | 1      |      |
//!     //          |        |        | 3    | 1
//!     //          +------> 2 -------+      |
//!     //           10      |               |
//!     //                   +---------------+
//!     //
//!     // The graph is represented as an adjacency list where each index,
//!     // corresponding to a node value, has a list of outgoing edges.
//!     // Chosen for its efficiency.
//!     let graph = vec![
//!         // Node 0
//!         vec![Edge { node: 2, cost: 10 },
//!              Edge { node: 1, cost: 1 }],
//!         // Node 1
//!         vec![Edge { node: 3, cost: 2 }],
//!         // Node 2
//!         vec![Edge { node: 1, cost: 1 },
//!              Edge { node: 3, cost: 3 },
//!              Edge { node: 4, cost: 1 }],
//!         // Node 3
//!         vec![Edge { node: 0, cost: 7 },
//!              Edge { node: 4, cost: 2 }],
//!         // Node 4
//!         vec![]];
//!
//!     assert_eq!(shortest_path(&graph, 0, 1), Some(1));
//!     assert_eq!(shortest_path(&graph, 0, 3), Some(3));
//!     assert_eq!(shortest_path(&graph, 3, 0), Some(7));
//!     assert_eq!(shortest_path(&graph, 0, 4), Some(5));
//!     assert_eq!(shortest_path(&graph, 4, 0), None);
//! }
//! ```

#![deny(unsafe_op_in_unsafe_fn)]
#![allow(clippy::needless_doctest_main)]
#![allow(missing_docs)]
149// #![stable(feature = "rust1", since = "1.0.0")]
150
151// use core::ops::{Deref, DerefMut, Place, Placer, InPlace};
152// use core::iter::{FromIterator, FusedIterator};
153use std::cmp::Ordering;
154use std::iter::FromIterator;
155use std::slice;
156// use std::iter::FusedIterator;
157// use std::vec::Drain;
158use compare::Compare;
159use core::fmt;
160use core::mem::{swap, ManuallyDrop};
161use core::ptr;
162#[cfg(feature = "serde")]
163use serde::{Deserialize, Serialize};
164use std::ops::Deref;
165use std::ops::DerefMut;
166use std::vec;
167
168// use slice;
169// use vec::{self, Vec};
170
171// use super::SpecExtend;
172
173/// A priority queue implemented with a binary heap.
174///
175/// This will be a max-heap.
176///
177/// It is a logic error for an item to be modified in such a way that the
178/// item's ordering relative to any other item, as determined by the [`Ord`]
179/// trait, changes while it is in the heap. This is normally only possible
180/// through [`Cell`], [`RefCell`], global state, I/O, or unsafe code. The
181/// behavior resulting from such a logic error is not specified (it
182/// could include panics, incorrect results, aborts, memory leaks, or
183/// non-termination) but will not be undefined behavior.
184///
185/// # Examples
186///
187/// ```
188/// use binary_heap_plus::BinaryHeap;
189///
190/// // Type inference lets us omit an explicit type signature (which
191/// // would be `BinaryHeap<i32, MaxComparator>` in this example).
192/// let mut heap = BinaryHeap::new();
193///
194/// // We can use peek to look at the next item in the heap. In this case,
195/// // there's no items in there yet so we get None.
196/// assert_eq!(heap.peek(), None);
197///
198/// // Let's add some scores...
199/// heap.push(1);
200/// heap.push(5);
201/// heap.push(2);
202///
203/// // Now peek shows the most important item in the heap.
204/// assert_eq!(heap.peek(), Some(&5));
205///
206/// // We can check the length of a heap.
207/// assert_eq!(heap.len(), 3);
208///
209/// // We can iterate over the items in the heap, although they are returned in
210/// // a random order.
211/// for x in &heap {
212/// println!("{}", x);
213/// }
214///
215/// // If we instead pop these scores, they should come back in order.
216/// assert_eq!(heap.pop(), Some(5));
217/// assert_eq!(heap.pop(), Some(2));
218/// assert_eq!(heap.pop(), Some(1));
219/// assert_eq!(heap.pop(), None);
220///
221/// // We can clear the heap of any remaining items.
222/// heap.clear();
223///
224/// // The heap should now be empty.
225/// assert!(heap.is_empty())
226/// ```
227///
228/// A `BinaryHeap` with a known list of items can be initialized from an array:
229///
230/// ```
231/// use binary_heap_plus::BinaryHeap;
232///
233/// // This will create a max-heap.
234/// let heap = BinaryHeap::from([1, 5, 2]);
235/// ```
236///
237/// ## Min-heap
238///
239/// `BinaryHeap` can also act as a min-heap without requiring [`Reverse`] or a custom [`Ord`]
240/// implementation.
241///
242/// ```
243/// use binary_heap_plus::BinaryHeap;
244///
245/// let mut heap = BinaryHeap::new_min();
246///
247/// // There is no need to wrap values in `Reverse`
248/// heap.push(1);
249/// heap.push(5);
250/// heap.push(2);
251///
252/// // If we pop these scores now, they should come back in the reverse order.
253/// assert_eq!(heap.pop(), Some(1));
254/// assert_eq!(heap.pop(), Some(2));
255/// assert_eq!(heap.pop(), Some(5));
256/// assert_eq!(heap.pop(), None);
257/// ```
258///
259/// # Time complexity
260///
261/// | [push] | [pop] | [peek]/[peek\_mut] |
262/// |---------|---------------|--------------------|
263/// | *O*(1)~ | *O*(log(*n*)) | *O*(1) |
264///
265/// The value for `push` is an expected cost; the method documentation gives a
266/// more detailed analysis.
267///
268/// [`Reverse`]: https://doc.rust-lang.org/stable/core/cmp/struct.Reverse.html
269/// [`Ord`]: https://doc.rust-lang.org/stable/core/cmp/trait.Ord.html
270/// [`Cell`]: https://doc.rust-lang.org/stable/core/cell/struct.Cell.html
271/// [`RefCell`]: https://doc.rust-lang.org/stable/core/cell/struct.RefCell.html
272/// [push]: BinaryHeap::push
273/// [pop]: BinaryHeap::pop
274/// [peek]: BinaryHeap::peek
275/// [peek\_mut]: BinaryHeap::peek_mut
276// #[stable(feature = "rust1", since = "1.0.0")]
277#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
278pub struct BinaryHeap<T, C = MaxComparator> {
279 data: Vec<T>,
280 cmp: C,
281}
282
/// For `T` that implements `Ord`, you can use this struct to quickly
/// set up a max-heap.
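///
/// A minimal usage sketch, assuming `MaxComparator` is re-exported at the crate
/// root alongside `BinaryHeap` (it is the default comparator, so this behaves
/// like `BinaryHeap::new()`):
///
/// ```
/// use binary_heap_plus::{BinaryHeap, MaxComparator};
///
/// let mut heap = BinaryHeap::from_vec_cmp(vec![3, 1, 5], MaxComparator);
/// assert_eq!(heap.pop(), Some(5));
/// ```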
285#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
286#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
287pub struct MaxComparator;
288
289impl<T: Ord> Compare<T> for MaxComparator {
290 fn compare(&self, a: &T, b: &T) -> Ordering {
291 a.cmp(b)
292 }
293}
294
/// For `T` that implements `Ord`, you can use this struct to quickly
/// set up a min-heap.
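///
/// A minimal usage sketch, assuming `MinComparator` is re-exported at the crate
/// root (this is the comparator behind `BinaryHeap::new_min()`):
///
/// ```
/// use binary_heap_plus::{BinaryHeap, MinComparator};
///
/// let mut heap = BinaryHeap::from_vec_cmp(vec![3, 1, 5], MinComparator);
/// assert_eq!(heap.pop(), Some(1));
/// ```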
297#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
298#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
299pub struct MinComparator;
300
301impl<T: Ord> Compare<T> for MinComparator {
302 fn compare(&self, a: &T, b: &T) -> Ordering {
303 b.cmp(a)
304 }
305}
306
/// A comparator defined by a closure.
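///
/// A brief sketch of building a heap from an explicit closure comparator
/// (this is what `BinaryHeap::new_by()` wraps), assuming `FnComparator` is
/// re-exported at the crate root:
///
/// ```
/// use binary_heap_plus::{BinaryHeap, FnComparator};
///
/// // Reversing the comparison yields a min-heap.
/// let mut heap = BinaryHeap::from_vec_cmp(
///     vec![3, 1, 5],
///     FnComparator(|a: &i32, b: &i32| b.cmp(a)),
/// );
/// assert_eq!(heap.pop(), Some(1));
/// ```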
308#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
309#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
310pub struct FnComparator<F>(pub F);
311
312impl<T, F> Compare<T> for FnComparator<F>
313where
314 F: Fn(&T, &T) -> Ordering,
315{
316 fn compare(&self, a: &T, b: &T) -> Ordering {
317 self.0(a, b)
318 }
319}
320
/// A comparator that orders values by a key extracted with a closure.
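///
/// A brief sketch of ordering elements by a derived key (this is the comparator
/// behind `BinaryHeap::new_by_key()`), assuming `KeyComparator` is re-exported
/// at the crate root:
///
/// ```
/// use binary_heap_plus::{BinaryHeap, KeyComparator};
///
/// // Order pairs by their second field; the greatest key comes out first.
/// let mut heap = BinaryHeap::from_vec_cmp(
///     vec![(1, 3), (2, 1), (3, 2)],
///     KeyComparator(|t: &(i32, i32)| t.1),
/// );
/// assert_eq!(heap.pop(), Some((1, 3)));
/// ```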
322#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
323#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
324pub struct KeyComparator<F>(pub F);
325
326impl<K: Ord, T, F> Compare<T> for KeyComparator<F>
327where
328 F: Fn(&T) -> K,
329{
330 fn compare(&self, a: &T, b: &T) -> Ordering {
331 self.0(a).cmp(&self.0(b))
332 }
333}
334
335/// Structure wrapping a mutable reference to the greatest item on a
336/// `BinaryHeap`.
337///
338/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
339/// its documentation for more.
340///
341/// [`peek_mut`]: BinaryHeap::peek_mut
342// #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
343pub struct PeekMut<'a, T: 'a, C: 'a + Compare<T>> {
344 heap: &'a mut BinaryHeap<T, C>,
345 sift: bool,
346}
347
348// #[stable(feature = "collection_debug", since = "1.17.0")]
349impl<T: fmt::Debug, C: Compare<T>> fmt::Debug for PeekMut<'_, T, C> {
350 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
351 f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
352 }
353}
354
355// #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
356impl<T, C: Compare<T>> Drop for PeekMut<'_, T, C> {
357 fn drop(&mut self) {
358 if self.sift {
359 // SAFETY: PeekMut is only instantiated for non-empty heaps.
360 unsafe { self.heap.sift_down(0) };
361 }
362 }
363}
364
365// #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
366impl<T, C: Compare<T>> Deref for PeekMut<'_, T, C> {
367 type Target = T;
368 fn deref(&self) -> &T {
369 debug_assert!(!self.heap.is_empty());
370 // SAFE: PeekMut is only instantiated for non-empty heaps
371 unsafe { self.heap.data.get_unchecked(0) }
372 }
373}
374
375// #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
376impl<T, C: Compare<T>> DerefMut for PeekMut<'_, T, C> {
377 fn deref_mut(&mut self) -> &mut T {
378 debug_assert!(!self.heap.is_empty());
379 self.sift = true;
380 // SAFE: PeekMut is only instantiated for non-empty heaps
381 unsafe { self.heap.data.get_unchecked_mut(0) }
382 }
383}
384
385impl<'a, T, C: Compare<T>> PeekMut<'a, T, C> {
386 /// Removes the peeked value from the heap and returns it.
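    ///
    /// A small usage sketch, assuming `PeekMut` is re-exported at the crate root:
    ///
    /// ```
    /// use binary_heap_plus::{BinaryHeap, PeekMut};
    ///
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::from_vec(vec![1, 5, 2]);
    /// if let Some(top) = heap.peek_mut() {
    ///     // Pop the greatest element through the handle.
    ///     assert_eq!(PeekMut::pop(top), 5);
    /// }
    /// assert_eq!(heap.len(), 2);
    /// ```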
387 // #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
388 pub fn pop(mut this: PeekMut<'a, T, C>) -> T {
389 let value = this.heap.pop().unwrap();
390 this.sift = false;
391 value
392 }
393}
394
395// #[stable(feature = "rust1", since = "1.0.0")]
396impl<T: Clone, C: Clone> Clone for BinaryHeap<T, C> {
397 fn clone(&self) -> Self {
398 BinaryHeap {
399 data: self.data.clone(),
400 cmp: self.cmp.clone(),
401 }
402 }
403
404 fn clone_from(&mut self, source: &Self) {
405 self.data.clone_from(&source.data);
406 }
407}
408
409// #[stable(feature = "rust1", since = "1.0.0")]
410impl<T: Ord> Default for BinaryHeap<T> {
411 /// Creates an empty `BinaryHeap<T>`.
412 #[inline]
413 fn default() -> BinaryHeap<T> {
414 BinaryHeap::new()
415 }
416}
417
418// #[stable(feature = "binaryheap_debug", since = "1.4.0")]
419impl<T: fmt::Debug, C> fmt::Debug for BinaryHeap<T, C> {
420 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
421 f.debug_list().entries(self.iter()).finish()
422 }
423}
424
425impl<T, C: Compare<T> + Default> BinaryHeap<T, C> {
426 /// Generic constructor for `BinaryHeap` from [`Vec`].
427 ///
428 /// Because `BinaryHeap` stores the elements in its internal `Vec`,
429 /// it's natural to construct it from `Vec`.
430 ///
431 /// [`Vec`]: https://doc.rust-lang.org/stable/std/vec/struct.Vec.html
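    ///
    /// A brief sketch; with the default comparator, `BinaryHeap<T>` is a max-heap:
    ///
    /// ```
    /// use binary_heap_plus::BinaryHeap;
    ///
    /// let mut heap: BinaryHeap<i32> = BinaryHeap::from_vec(vec![3, 1, 5]);
    /// assert_eq!(heap.pop(), Some(5));
    /// ```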
432 pub fn from_vec(vec: Vec<T>) -> Self {
433 BinaryHeap::from_vec_cmp(vec, C::default())
434 }
435}
436
437impl<T, C: Compare<T>> BinaryHeap<T, C> {
438 /// Generic constructor for `BinaryHeap` from [`Vec`] and comparator.
439 ///
440 /// Because `BinaryHeap` stores the elements in its internal `Vec`,
441 /// it's natural to construct it from `Vec`.
442 ///
443 /// [`Vec`]: https://doc.rust-lang.org/stable/std/vec/struct.Vec.html
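    ///
    /// A brief sketch with a closure-based comparator (assuming `FnComparator`
    /// is re-exported at the crate root):
    ///
    /// ```
    /// use binary_heap_plus::{BinaryHeap, FnComparator};
    ///
    /// // Largest absolute value first.
    /// let mut heap = BinaryHeap::from_vec_cmp(
    ///     vec![3, -7, 5],
    ///     FnComparator(|a: &i32, b: &i32| a.abs().cmp(&b.abs())),
    /// );
    /// assert_eq!(heap.pop(), Some(-7));
    /// ```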
444 pub fn from_vec_cmp(vec: Vec<T>, cmp: C) -> Self {
445 unsafe { BinaryHeap::from_vec_cmp_raw(vec, cmp, true) }
446 }
447
448 /// Generic constructor for `BinaryHeap` from [`Vec`] and comparator.
449 ///
450 /// Because `BinaryHeap` stores the elements in its internal `Vec`,
451 /// it's natural to construct it from `Vec`.
452 ///
453 /// # Safety
    /// The caller is responsible for passing a valid `rebuild` value: it must be
    /// `true` unless `vec` is already heap-ordered with respect to `cmp`.
455 ///
456 /// [`Vec`]: https://doc.rust-lang.org/stable/std/vec/struct.Vec.html
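    ///
    /// A sketch of the intended use: pass `rebuild = false` only when the input is
    /// already heap-ordered under `cmp` (here, a max-heap of integers, assuming
    /// `MaxComparator` is re-exported at the crate root):
    ///
    /// ```
    /// use binary_heap_plus::{BinaryHeap, MaxComparator};
    ///
    /// // `[5, 3, 4, 1, 2]` already satisfies the max-heap property.
    /// let mut heap = unsafe {
    ///     BinaryHeap::from_vec_cmp_raw(vec![5, 3, 4, 1, 2], MaxComparator, false)
    /// };
    /// assert_eq!(heap.pop(), Some(5));
    /// ```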
457 pub unsafe fn from_vec_cmp_raw(vec: Vec<T>, cmp: C, rebuild: bool) -> Self {
458 let mut heap = BinaryHeap { data: vec, cmp };
459 if rebuild && !heap.data.is_empty() {
460 heap.rebuild();
461 }
462 heap
463 }
464}
465
466impl<T: Ord> BinaryHeap<T> {
467 /// Creates an empty `BinaryHeap`.
468 ///
469 /// This default version will create a max-heap.
470 ///
471 /// # Examples
472 ///
473 /// Basic usage:
474 ///
475 /// ```
476 /// use binary_heap_plus::BinaryHeap;
477 /// let mut heap = BinaryHeap::new();
478 /// heap.push(3);
479 /// heap.push(1);
480 /// heap.push(5);
481 /// assert_eq!(heap.pop(), Some(5));
482 /// ```
483 // #[stable(feature = "rust1", since = "1.0.0")]
484 #[must_use]
485 pub fn new() -> Self {
486 BinaryHeap::from_vec(vec![])
487 }
488
489 /// Creates an empty `BinaryHeap` with a specific capacity.
490 /// This preallocates enough memory for `capacity` elements,
491 /// so that the `BinaryHeap` does not have to be reallocated
492 /// until it contains at least that many values.
493 ///
494 /// This default version will create a max-heap.
495 ///
496 /// # Examples
497 ///
498 /// Basic usage:
499 ///
500 /// ```
501 /// use binary_heap_plus::BinaryHeap;
502 /// let mut heap = BinaryHeap::with_capacity(10);
503 /// assert_eq!(heap.capacity(), 10);
504 /// heap.push(3);
505 /// heap.push(1);
506 /// heap.push(5);
507 /// assert_eq!(heap.pop(), Some(5));
508 /// ```
509 // #[stable(feature = "rust1", since = "1.0.0")]
510 #[must_use]
511 pub fn with_capacity(capacity: usize) -> Self {
512 BinaryHeap::from_vec(Vec::with_capacity(capacity))
513 }
514}
515
516impl<T: Ord> BinaryHeap<T, MinComparator> {
517 /// Creates an empty `BinaryHeap`.
518 ///
519 /// The `_min()` version will create a min-heap.
520 ///
521 /// # Examples
522 ///
523 /// Basic usage:
524 ///
525 /// ```
526 /// use binary_heap_plus::BinaryHeap;
527 /// let mut heap = BinaryHeap::new_min();
528 /// heap.push(3);
529 /// heap.push(1);
530 /// heap.push(5);
531 /// assert_eq!(heap.pop(), Some(1));
532 /// ```
533 #[must_use]
534 pub fn new_min() -> Self {
535 BinaryHeap::from_vec(vec![])
536 }
537
538 /// Creates an empty `BinaryHeap` with a specific capacity.
539 /// This preallocates enough memory for `capacity` elements,
540 /// so that the `BinaryHeap` does not have to be reallocated
541 /// until it contains at least that many values.
542 ///
543 /// The `_min()` version will create a min-heap.
544 ///
545 /// # Examples
546 ///
547 /// Basic usage:
548 ///
549 /// ```
550 /// use binary_heap_plus::BinaryHeap;
551 /// let mut heap = BinaryHeap::with_capacity_min(10);
552 /// assert_eq!(heap.capacity(), 10);
553 /// heap.push(3);
554 /// heap.push(1);
555 /// heap.push(5);
556 /// assert_eq!(heap.pop(), Some(1));
557 /// ```
558 #[must_use]
559 pub fn with_capacity_min(capacity: usize) -> Self {
560 BinaryHeap::from_vec(Vec::with_capacity(capacity))
561 }
562}
563
564impl<T, F> BinaryHeap<T, FnComparator<F>>
565where
566 F: Fn(&T, &T) -> Ordering,
567{
568 /// Creates an empty `BinaryHeap`.
569 ///
    /// The `_by()` version will create a heap ordered by the given closure.
571 ///
572 /// # Examples
573 ///
574 /// Basic usage:
575 ///
576 /// ```
577 /// use binary_heap_plus::BinaryHeap;
578 /// let mut heap = BinaryHeap::new_by(|a: &i32, b: &i32| b.cmp(a));
579 /// heap.push(3);
580 /// heap.push(1);
581 /// heap.push(5);
582 /// assert_eq!(heap.pop(), Some(1));
583 /// ```
584 #[must_use]
585 pub fn new_by(f: F) -> Self {
586 BinaryHeap::from_vec_cmp(vec![], FnComparator(f))
587 }
588
589 /// Creates an empty `BinaryHeap` with a specific capacity.
590 /// This preallocates enough memory for `capacity` elements,
591 /// so that the `BinaryHeap` does not have to be reallocated
592 /// until it contains at least that many values.
593 ///
    /// The `_by()` version will create a heap ordered by the given closure.
595 ///
596 /// # Examples
597 ///
598 /// Basic usage:
599 ///
600 /// ```
601 /// use binary_heap_plus::BinaryHeap;
602 /// let mut heap = BinaryHeap::with_capacity_by(10, |a: &i32, b: &i32| b.cmp(a));
603 /// assert_eq!(heap.capacity(), 10);
604 /// heap.push(3);
605 /// heap.push(1);
606 /// heap.push(5);
607 /// assert_eq!(heap.pop(), Some(1));
608 /// ```
609 #[must_use]
610 pub fn with_capacity_by(capacity: usize, f: F) -> Self {
611 BinaryHeap::from_vec_cmp(Vec::with_capacity(capacity), FnComparator(f))
612 }
613}
614
615impl<T, F, K: Ord> BinaryHeap<T, KeyComparator<F>>
616where
617 F: Fn(&T) -> K,
618{
619 /// Creates an empty `BinaryHeap`.
620 ///
    /// The `_by_key()` version will create a heap ordered by the key extracted by the given closure.
622 ///
623 /// # Examples
624 ///
625 /// Basic usage:
626 ///
627 /// ```
628 /// use binary_heap_plus::BinaryHeap;
629 /// let mut heap = BinaryHeap::new_by_key(|a: &i32| a % 4);
630 /// heap.push(3);
631 /// heap.push(1);
632 /// heap.push(5);
633 /// assert_eq!(heap.pop(), Some(3));
634 /// ```
635 #[must_use]
636 pub fn new_by_key(f: F) -> Self {
637 BinaryHeap::from_vec_cmp(vec![], KeyComparator(f))
638 }
639
640 /// Creates an empty `BinaryHeap` with a specific capacity.
641 /// This preallocates enough memory for `capacity` elements,
642 /// so that the `BinaryHeap` does not have to be reallocated
643 /// until it contains at least that many values.
644 ///
    /// The `_by_key()` version will create a heap ordered by the key extracted by the given closure.
646 ///
647 /// # Examples
648 ///
649 /// Basic usage:
650 ///
651 /// ```
652 /// use binary_heap_plus::BinaryHeap;
653 /// let mut heap = BinaryHeap::with_capacity_by_key(10, |a: &i32| a % 4);
654 /// assert_eq!(heap.capacity(), 10);
655 /// heap.push(3);
656 /// heap.push(1);
657 /// heap.push(5);
658 /// assert_eq!(heap.pop(), Some(3));
659 /// ```
660 #[must_use]
661 pub fn with_capacity_by_key(capacity: usize, f: F) -> Self {
662 BinaryHeap::from_vec_cmp(Vec::with_capacity(capacity), KeyComparator(f))
663 }
664}
665
666impl<T, C: Compare<T>> BinaryHeap<T, C> {
    /// Replaces the comparator of the binary heap.
668 ///
669 /// # Examples
670 ///
671 /// Basic usage:
672 ///
673 /// ```
674 /// use binary_heap_plus::BinaryHeap;
675 /// use compare::Compare;
676 /// use std::cmp::Ordering;
677 ///
    /// struct Comparator {
    ///     ascending: bool,
    /// }
    ///
    /// impl Compare<i32> for Comparator {
    ///     fn compare(&self, l: &i32, r: &i32) -> Ordering {
    ///         if self.ascending {
    ///             r.cmp(l)
    ///         } else {
    ///             l.cmp(r)
    ///         }
    ///     }
    /// }
    ///
    /// // Construct a heap in ascending order.
    /// let mut heap = BinaryHeap::from_vec_cmp(vec![3, 1, 5], Comparator { ascending: true });
    ///
    /// // Replace the comparator.
    /// heap.replace_cmp(Comparator { ascending: false });
    /// assert_eq!(heap.into_iter_sorted().collect::<Vec<_>>(), [5, 3, 1]);
    /// ```
699 #[inline]
700 pub fn replace_cmp(&mut self, cmp: C) {
701 unsafe {
702 self.replace_cmp_raw(cmp, true);
703 }
704 }
705
    /// Replaces the comparator of the binary heap.
707 ///
708 /// # Safety
    /// The caller is responsible for passing a valid `rebuild` value: it must be
    /// `true` unless the existing data is already heap-ordered with respect to the
    /// new `cmp`.
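    ///
    /// A sketch of the contract: if the new comparator changes the order, `rebuild`
    /// must be `true` (which is what `replace_cmp` does). The fn-pointer comparator
    /// below is only for illustration:
    ///
    /// ```
    /// use std::cmp::Ordering;
    /// use binary_heap_plus::{BinaryHeap, FnComparator};
    ///
    /// fn max_order(a: &i32, b: &i32) -> Ordering { a.cmp(b) }
    /// fn min_order(a: &i32, b: &i32) -> Ordering { b.cmp(a) }
    ///
    /// let mut heap = BinaryHeap::from_vec_cmp(
    ///     vec![3, 1, 5],
    ///     FnComparator(max_order as fn(&i32, &i32) -> Ordering),
    /// );
    /// // The new comparator reverses the order, so a rebuild is required.
    /// unsafe { heap.replace_cmp_raw(FnComparator(min_order as fn(&i32, &i32) -> Ordering), true) };
    /// assert_eq!(heap.pop(), Some(1));
    /// ```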
710 pub unsafe fn replace_cmp_raw(&mut self, cmp: C, rebuild: bool) {
711 self.cmp = cmp;
712 if rebuild && !self.data.is_empty() {
713 self.rebuild();
714 }
715 }
716
717 /// Returns a mutable reference to the greatest item in the binary heap, or
718 /// `None` if it is empty.
719 ///
720 /// Note: If the `PeekMut` value is leaked, the heap may be in an
721 /// inconsistent state.
722 ///
723 /// # Examples
724 ///
725 /// Basic usage:
726 ///
727 /// ```
728 /// use binary_heap_plus::BinaryHeap;
729 /// let mut heap = BinaryHeap::new();
730 /// assert!(heap.peek_mut().is_none());
731 ///
732 /// heap.push(1);
733 /// heap.push(5);
734 /// heap.push(2);
735 /// {
736 /// let mut val = heap.peek_mut().unwrap();
737 /// *val = 0;
738 /// }
739 /// assert_eq!(heap.peek(), Some(&2));
740 /// ```
741 ///
742 /// # Time complexity
743 ///
744 /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
745 /// otherwise it's *O*(1).
746 // #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
747 pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, C>> {
748 if self.is_empty() {
749 None
750 } else {
751 Some(PeekMut {
752 heap: self,
753 sift: false,
754 })
755 }
756 }
757
758 /// Removes the greatest item from the binary heap and returns it, or `None` if it
759 /// is empty.
760 ///
761 /// # Examples
762 ///
763 /// Basic usage:
764 ///
765 /// ```
766 /// use binary_heap_plus::BinaryHeap;
767 /// let mut heap = BinaryHeap::from([1, 3]);
768 ///
769 /// assert_eq!(heap.pop(), Some(3));
770 /// assert_eq!(heap.pop(), Some(1));
771 /// assert_eq!(heap.pop(), None);
772 /// ```
773 ///
774 /// # Time complexity
775 ///
776 /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
777 // #[stable(feature = "rust1", since = "1.0.0")]
778 pub fn pop(&mut self) -> Option<T> {
779 self.data.pop().map(|mut item| {
780 if !self.is_empty() {
781 swap(&mut item, &mut self.data[0]);
782 // SAFETY: !self.is_empty() means that self.len() > 0
783 unsafe { self.sift_down_to_bottom(0) };
784 }
785 item
786 })
787 }
788
789 /// Pushes an item onto the binary heap.
790 ///
791 /// # Examples
792 ///
793 /// Basic usage:
794 ///
795 /// ```
796 /// use binary_heap_plus::BinaryHeap;
797 /// let mut heap = BinaryHeap::new();
798 /// heap.push(3);
799 /// heap.push(5);
800 /// heap.push(1);
801 ///
802 /// assert_eq!(heap.len(), 3);
803 /// assert_eq!(heap.peek(), Some(&5));
804 /// ```
805 ///
806 /// # Time complexity
807 ///
808 /// The expected cost of `push`, averaged over every possible ordering of
809 /// the elements being pushed, and over a sufficiently large number of
810 /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
811 /// elements that are *not* already in any sorted pattern.
812 ///
813 /// The time complexity degrades if elements are pushed in predominantly
814 /// ascending order. In the worst case, elements are pushed in ascending
815 /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
816 /// containing *n* elements.
817 ///
818 /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
819 /// occurs when capacity is exhausted and needs a resize. The resize cost
820 /// has been amortized in the previous figures.
821 // #[stable(feature = "rust1", since = "1.0.0")]
822 pub fn push(&mut self, item: T) {
823 let old_len = self.len();
824 self.data.push(item);
825 // SAFETY: Since we pushed a new item it means that
826 // old_len = self.len() - 1 < self.len()
827 unsafe { self.sift_up(0, old_len) };
828 }
829
830 /// Consumes the `BinaryHeap` and returns a vector in sorted
831 /// (ascending) order.
832 ///
833 /// # Examples
834 ///
835 /// Basic usage:
836 ///
837 /// ```
838 /// use binary_heap_plus::BinaryHeap;
839 ///
840 /// let mut heap = BinaryHeap::from([1, 2, 4, 5, 7]);
841 /// heap.push(6);
842 /// heap.push(3);
843 ///
844 /// let vec = heap.into_sorted_vec();
845 /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
846 /// ```
847 #[must_use = "`self` will be dropped if the result is not used"]
848 // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
849 pub fn into_sorted_vec(mut self) -> Vec<T> {
850 let mut end = self.len();
851 while end > 1 {
852 end -= 1;
853 // SAFETY: `end` goes from `self.len() - 1` to 1 (both included),
854 // so it's always a valid index to access.
855 // It is safe to access index 0 (i.e. `ptr`), because
856 // 1 <= end < self.len(), which means self.len() >= 2.
857 unsafe {
858 let ptr = self.data.as_mut_ptr();
859 ptr::swap(ptr, ptr.add(end));
860 }
861 // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so:
862 // 0 < 1 <= end <= self.len() - 1 < self.len()
863 // Which means 0 < end and end < self.len().
864 unsafe { self.sift_down_range(0, end) };
865 }
866 self.into_vec()
867 }
868
869 // The implementations of sift_up and sift_down use unsafe blocks in
870 // order to move an element out of the vector (leaving behind a
871 // hole), shift along the others and move the removed element back into the
872 // vector at the final location of the hole.
873 // The `Hole` type is used to represent this, and make sure
874 // the hole is filled back at the end of its scope, even on panic.
875 // Using a hole reduces the constant factor compared to using swaps,
876 // which involves twice as many moves.
877
878 /// # Safety
879 ///
880 /// The caller must guarantee that `pos < self.len()`.
881 unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
882 // Take out the value at `pos` and create a hole.
883 // SAFETY: The caller guarantees that pos < self.len()
884 let mut hole = unsafe { Hole::new(&mut self.data, pos) };
885
886 while hole.pos() > start {
887 let parent = (hole.pos() - 1) / 2;
888
889 // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0
890 // and so hole.pos() - 1 can't underflow.
891 // This guarantees that parent < hole.pos() so
892 // it's a valid index and also != hole.pos().
893 if self
894 .cmp
895 .compares_le(hole.element(), unsafe { hole.get(parent) })
896 {
897 break;
898 }
899
900 // SAFETY: Same as above
901 unsafe { hole.move_to(parent) };
902 }
903
904 hole.pos()
905 }
906
907 /// Take an element at `pos` and move it down the heap,
908 /// while its children are larger.
909 ///
910 /// # Safety
911 ///
912 /// The caller must guarantee that `pos < end <= self.len()`.
913 unsafe fn sift_down_range(&mut self, pos: usize, end: usize) {
914 // SAFETY: The caller guarantees that pos < end <= self.len().
915 let mut hole = unsafe { Hole::new(&mut self.data, pos) };
916 let mut child = 2 * hole.pos() + 1;
917
918 // Loop invariant: child == 2 * hole.pos() + 1.
919 while child <= end.saturating_sub(2) {
920 // compare with the greater of the two children
921 // SAFETY: child < end - 1 < self.len() and
922 // child + 1 < end <= self.len(), so they're valid indexes.
923 // child == 2 * hole.pos() + 1 != hole.pos() and
924 // child + 1 == 2 * hole.pos() + 2 != hole.pos().
925 // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
926 // if T is a ZST
927 child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize;
928
929 // if we are already in order, stop.
930 // SAFETY: child is now either the old child or the old child+1
            // We have already proven that both are < self.len() and != hole.pos()
932 if self
933 .cmp
934 .compares_ge(hole.element(), unsafe { hole.get(child) })
935 {
936 return;
937 }
938
939 // SAFETY: same as above.
940 unsafe { hole.move_to(child) };
941 child = 2 * hole.pos() + 1;
942 }
943
944 // SAFETY: && short circuit, which means that in the
945 // second condition it's already true that child == end - 1 < self.len().
946 if child == end - 1
947 && self
948 .cmp
949 .compares_lt(hole.element(), unsafe { hole.get(child) })
950 {
951 // SAFETY: child is already proven to be a valid index and
952 // child == 2 * hole.pos() + 1 != hole.pos().
953 unsafe { hole.move_to(child) };
954 }
955 }
956
957 /// # Safety
958 ///
959 /// The caller must guarantee that `pos < self.len()`.
960 unsafe fn sift_down(&mut self, pos: usize) {
961 let len = self.len();
962 // SAFETY: pos < len is guaranteed by the caller and
963 // obviously len = self.len() <= self.len().
964 unsafe { self.sift_down_range(pos, len) };
965 }
966
967 /// Take an element at `pos` and move it all the way down the heap,
968 /// then sift it up to its position.
969 ///
970 /// Note: This is faster when the element is known to be large / should
971 /// be closer to the bottom.
972 ///
973 /// # Safety
974 ///
975 /// The caller must guarantee that `pos < self.len()`.
976 unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
977 let end = self.len();
978 let start = pos;
979
980 // SAFETY: The caller guarantees that pos < self.len().
981 let mut hole = unsafe { Hole::new(&mut self.data, pos) };
982 let mut child = 2 * hole.pos() + 1;
983
984 // Loop invariant: child == 2 * hole.pos() + 1.
985 while child <= end.saturating_sub(2) {
986 // SAFETY: child < end - 1 < self.len() and
987 // child + 1 < end <= self.len(), so they're valid indexes.
988 // child == 2 * hole.pos() + 1 != hole.pos() and
989 // child + 1 == 2 * hole.pos() + 2 != hole.pos().
990 // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
991 // if T is a ZST
992 child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize;
993
994 // SAFETY: Same as above
995 unsafe { hole.move_to(child) };
996 child = 2 * hole.pos() + 1;
997 }
998
999 if child == end - 1 {
1000 // SAFETY: child == end - 1 < self.len(), so it's a valid index
1001 // and child == 2 * hole.pos() + 1 != hole.pos().
1002 unsafe { hole.move_to(child) };
1003 }
1004 pos = hole.pos();
1005 drop(hole);
1006
1007 // SAFETY: pos is the position in the hole and was already proven
1008 // to be a valid index.
1009 unsafe { self.sift_up(start, pos) };
1010 }
1011
1012 /// Rebuild assuming data[0..start] is still a proper heap.
1013 fn rebuild_tail(&mut self, start: usize) {
1014 if start == self.len() {
1015 return;
1016 }
1017
1018 let tail_len = self.len() - start;
1019
1020 #[inline(always)]
1021 fn log2_fast(x: usize) -> usize {
1022 (usize::BITS - x.leading_zeros() - 1) as usize
1023 }
1024
1025 // `rebuild` takes O(self.len()) operations
1026 // and about 2 * self.len() comparisons in the worst case
1027 // while repeating `sift_up` takes O(tail_len * log(start)) operations
1028 // and about 1 * tail_len * log_2(start) comparisons in the worst case,
1029 // assuming start >= tail_len. For larger heaps, the crossover point
1030 // no longer follows this reasoning and was determined empirically.
1031 let better_to_rebuild = if start < tail_len {
1032 true
1033 } else if self.len() <= 2048 {
1034 2 * self.len() < tail_len * log2_fast(start)
1035 } else {
1036 2 * self.len() < tail_len * 11
1037 };
1038
1039 if better_to_rebuild {
1040 self.rebuild();
1041 } else {
1042 for i in start..self.len() {
1043 // SAFETY: The index `i` is always less than self.len().
1044 unsafe { self.sift_up(0, i) };
1045 }
1046 }
1047 }
1048
1049 fn rebuild(&mut self) {
1050 let mut n = self.len() / 2;
1051 while n > 0 {
1052 n -= 1;
1053 // SAFETY: n starts from self.len() / 2 and goes down to 0.
1054 // The only case when !(n < self.len()) is if
1055 // self.len() == 0, but it's ruled out by the loop condition.
1056 unsafe { self.sift_down(n) };
1057 }
1058 }
1059
1060 /// Moves all the elements of `other` into `self`, leaving `other` empty.
1061 ///
1062 /// # Examples
1063 ///
1064 /// Basic usage:
1065 ///
1066 /// ```
1067 /// use binary_heap_plus::BinaryHeap;
1068 ///
1069 /// let mut a = BinaryHeap::from([-10, 1, 2, 3, 3]);
1070 /// let mut b = BinaryHeap::from([-20, 5, 43]);
1071 ///
1072 /// a.append(&mut b);
1073 ///
1074 /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
1075 /// assert!(b.is_empty());
1076 /// ```
1077 // #[stable(feature = "binary_heap_append", since = "1.11.0")]
1078 pub fn append(&mut self, other: &mut Self) {
1079 if self.len() < other.len() {
1080 swap(self, other);
1081 }
1082
1083 let start = self.data.len();
1084
1085 self.data.append(&mut other.data);
1086
1087 self.rebuild_tail(start);
1088 }
1089}
1090
1091impl<T, C> BinaryHeap<T, C> {
1092 /// Returns an iterator visiting all values in the underlying vector, in
1093 /// arbitrary order.
1094 ///
1095 /// # Examples
1096 ///
1097 /// Basic usage:
1098 ///
1099 /// ```
1100 /// use binary_heap_plus::BinaryHeap;
1101 /// let heap = BinaryHeap::from([1, 2, 3, 4]);
1102 ///
1103 /// // Print 1, 2, 3, 4 in arbitrary order
1104 /// for x in heap.iter() {
1105 /// println!("{}", x);
1106 /// }
1107 /// ```
1108 // #[stable(feature = "rust1", since = "1.0.0")]
1109 pub fn iter(&self) -> Iter<'_, T> {
1110 Iter {
1111 iter: self.data.iter(),
1112 }
1113 }
1114
1115 /// Returns an iterator which retrieves elements in heap order.
1116 /// This method consumes the original heap.
1117 ///
1118 /// # Examples
1119 ///
1120 /// Basic usage:
1121 ///
1122 /// ```
1123 /// use binary_heap_plus::BinaryHeap;
1124 /// let heap = BinaryHeap::from([1, 2, 3, 4, 5]);
1125 ///
1126 /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), [5, 4]);
1127 /// ```
1128 // #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1129 pub fn into_iter_sorted(self) -> IntoIterSorted<T, C> {
1130 IntoIterSorted { inner: self }
1131 }
1132
1133 /// Returns the greatest item in the binary heap, or `None` if it is empty.
1134 ///
1135 /// # Examples
1136 ///
1137 /// Basic usage:
1138 ///
1139 /// ```
1140 /// use binary_heap_plus::BinaryHeap;
1141 /// let mut heap = BinaryHeap::new();
1142 /// assert_eq!(heap.peek(), None);
1143 ///
1144 /// heap.push(1);
1145 /// heap.push(5);
1146 /// heap.push(2);
1147 /// assert_eq!(heap.peek(), Some(&5));
1148 ///
1149 /// ```
1150 ///
1151 /// # Time complexity
1152 ///
1153 /// Cost is *O*(1) in the worst case.
1154 #[must_use]
1155 // #[stable(feature = "rust1", since = "1.0.0")]
1156 pub fn peek(&self) -> Option<&T> {
1157 self.data.get(0)
1158 }
1159
1160 /// Returns the number of elements the binary heap can hold without reallocating.
1161 ///
1162 /// # Examples
1163 ///
1164 /// Basic usage:
1165 ///
1166 /// ```
1167 /// use binary_heap_plus::BinaryHeap;
1168 /// let mut heap = BinaryHeap::with_capacity(100);
1169 /// assert!(heap.capacity() >= 100);
1170 /// heap.push(4);
1171 /// ```
1172 #[must_use]
1173 // #[stable(feature = "rust1", since = "1.0.0")]
1174 pub fn capacity(&self) -> usize {
1175 self.data.capacity()
1176 }
1177
1178 /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
1179 /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
1180 ///
1181 /// Note that the allocator may give the collection more space than it requests. Therefore
1182 /// capacity can not be relied upon to be precisely minimal. Prefer [`reserve`] if future
1183 /// insertions are expected.
1184 ///
1185 /// # Panics
1186 ///
1187 /// Panics if the new capacity overflows `usize`.
1188 ///
1189 /// # Examples
1190 ///
1191 /// Basic usage:
1192 ///
1193 /// ```
1194 /// use binary_heap_plus::BinaryHeap;
1195 /// let mut heap = BinaryHeap::new();
1196 /// heap.reserve_exact(100);
1197 /// assert!(heap.capacity() >= 100);
1198 /// heap.push(4);
1199 /// ```
1200 ///
1201 /// [`reserve`]: BinaryHeap::reserve
1202 // #[stable(feature = "rust1", since = "1.0.0")]
1203 pub fn reserve_exact(&mut self, additional: usize) {
1204 self.data.reserve_exact(additional);
1205 }
1206
1207 /// Reserves capacity for at least `additional` more elements to be inserted in the
1208 /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
1209 ///
1210 /// # Panics
1211 ///
1212 /// Panics if the new capacity overflows `usize`.
1213 ///
1214 /// # Examples
1215 ///
1216 /// Basic usage:
1217 ///
1218 /// ```
1219 /// use binary_heap_plus::BinaryHeap;
1220 /// let mut heap = BinaryHeap::new();
1221 /// heap.reserve(100);
1222 /// assert!(heap.capacity() >= 100);
1223 /// heap.push(4);
1224 /// ```
1225 // #[stable(feature = "rust1", since = "1.0.0")]
1226 pub fn reserve(&mut self, additional: usize) {
1227 self.data.reserve(additional);
1228 }
1229
1230 /// Discards as much additional capacity as possible.
1231 ///
1232 /// # Examples
1233 ///
1234 /// Basic usage:
1235 ///
1236 /// ```
1237 /// use binary_heap_plus::BinaryHeap;
1238 /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
1239 ///
1240 /// assert!(heap.capacity() >= 100);
1241 /// heap.shrink_to_fit();
1242 /// assert!(heap.capacity() == 0);
1243 /// ```
1244 // #[stable(feature = "rust1", since = "1.0.0")]
1245 pub fn shrink_to_fit(&mut self) {
1246 self.data.shrink_to_fit();
1247 }
1248
1249 /// Discards capacity with a lower bound.
1250 ///
1251 /// The capacity will remain at least as large as both the length
1252 /// and the supplied value.
1253 ///
1254 /// If the current capacity is less than the lower limit, this is a no-op.
1255 ///
1256 /// # Examples
1257 ///
1258 /// ```
    /// use binary_heap_plus::BinaryHeap;
1260 /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
1261 ///
1262 /// assert!(heap.capacity() >= 100);
1263 /// heap.shrink_to(10);
1264 /// assert!(heap.capacity() >= 10);
1265 /// ```
1266 #[inline]
1267 pub fn shrink_to(&mut self, min_capacity: usize) {
1268 self.data.shrink_to(min_capacity)
1269 }
1270
1271 /// Consumes the `BinaryHeap` and returns the underlying vector
1272 /// in arbitrary order.
1273 ///
1274 /// # Examples
1275 ///
1276 /// Basic usage:
1277 ///
1278 /// ```
1279 /// use binary_heap_plus::BinaryHeap;
1280 /// let heap = BinaryHeap::from([1, 2, 3, 4, 5, 6, 7]);
1281 /// let vec = heap.into_vec();
1282 ///
1283 /// // Will print in some order
1284 /// for x in vec {
1285 /// println!("{}", x);
1286 /// }
1287 /// ```
1288 #[must_use = "`self` will be dropped if the result is not used"]
1289 // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1290 pub fn into_vec(self) -> Vec<T> {
1291 self.into()
1292 }
1293
1294 /// Returns the length of the binary heap.
1295 ///
1296 /// # Examples
1297 ///
1298 /// Basic usage:
1299 ///
1300 /// ```
1301 /// use binary_heap_plus::BinaryHeap;
1302 /// let heap = BinaryHeap::from([1, 3]);
1303 ///
1304 /// assert_eq!(heap.len(), 2);
1305 /// ```
1306 #[must_use]
1307 // #[stable(feature = "rust1", since = "1.0.0")]
1308 pub fn len(&self) -> usize {
1309 self.data.len()
1310 }
1311
1312 /// Checks if the binary heap is empty.
1313 ///
1314 /// # Examples
1315 ///
1316 /// Basic usage:
1317 ///
1318 /// ```
1319 /// use binary_heap_plus::BinaryHeap;
1320 /// let mut heap = BinaryHeap::new();
1321 ///
1322 /// assert!(heap.is_empty());
1323 ///
1324 /// heap.push(3);
1325 /// heap.push(5);
1326 /// heap.push(1);
1327 ///
1328 /// assert!(!heap.is_empty());
1329 /// ```
1330 #[must_use]
1331 // #[stable(feature = "rust1", since = "1.0.0")]
1332 pub fn is_empty(&self) -> bool {
1333 self.len() == 0
1334 }
1335
1336 /// Clears the binary heap, returning an iterator over the removed elements
1337 /// in arbitrary order. If the iterator is dropped before being fully
1338 /// consumed, it drops the remaining elements in arbitrary order.
1339 ///
1340 /// The returned iterator keeps a mutable borrow on the heap to optimize
1341 /// its implementation.
1342 ///
1343 /// # Examples
1344 ///
1345 /// Basic usage:
1346 ///
1347 /// ```
1348 /// use binary_heap_plus::BinaryHeap;
1349 /// let mut heap = BinaryHeap::from([1, 3]);
1350 ///
1351 /// assert!(!heap.is_empty());
1352 ///
1353 /// for x in heap.drain() {
1354 /// println!("{}", x);
1355 /// }
1356 ///
1357 /// assert!(heap.is_empty());
1358 /// ```
1359 #[inline]
1360 // #[stable(feature = "drain", since = "1.6.0")]
1361 pub fn drain(&mut self) -> Drain<'_, T> {
1362 Drain {
1363 iter: self.data.drain(..),
1364 }
1365 }
1366
1367 /// Drops all items from the binary heap.
1368 ///
1369 /// # Examples
1370 ///
1371 /// Basic usage:
1372 ///
1373 /// ```
1374 /// use binary_heap_plus::BinaryHeap;
1375 /// let mut heap = BinaryHeap::from([1, 3]);
1376 ///
1377 /// assert!(!heap.is_empty());
1378 ///
1379 /// heap.clear();
1380 ///
1381 /// assert!(heap.is_empty());
1382 /// ```
1383 // #[stable(feature = "rust1", since = "1.0.0")]
1384 pub fn clear(&mut self) {
1385 self.drain();
1386 }
1387}
1388
/// Hole represents a hole in a slice, i.e., an index without a valid value
/// (because it was moved from or duplicated).
1391/// In drop, `Hole` will restore the slice by filling the hole
1392/// position with the value that was originally removed.
1393struct Hole<'a, T: 'a> {
1394 data: &'a mut [T],
1395 elt: ManuallyDrop<T>,
1396 pos: usize,
1397}
1398
1399impl<'a, T> Hole<'a, T> {
1400 /// Create a new `Hole` at index `pos`.
1401 ///
1402 /// Unsafe because pos must be within the data slice.
1403 #[inline]
1404 unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
1405 debug_assert!(pos < data.len());
1406 // SAFE: pos should be inside the slice
1407 let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
1408 Hole {
1409 data,
1410 elt: ManuallyDrop::new(elt),
1411 pos,
1412 }
1413 }
1414
1415 #[inline]
1416 fn pos(&self) -> usize {
1417 self.pos
1418 }
1419
1420 /// Returns a reference to the element removed.
1421 #[inline]
1422 fn element(&self) -> &T {
1423 &self.elt
1424 }
1425
1426 /// Returns a reference to the element at `index`.
1427 ///
1428 /// Unsafe because index must be within the data slice and not equal to pos.
1429 #[inline]
1430 unsafe fn get(&self, index: usize) -> &T {
1431 debug_assert!(index != self.pos);
1432 debug_assert!(index < self.data.len());
1433 unsafe { self.data.get_unchecked(index) }
1434 }
1435
1436 /// Move hole to new location
1437 ///
1438 /// Unsafe because index must be within the data slice and not equal to pos.
1439 #[inline]
1440 unsafe fn move_to(&mut self, index: usize) {
1441 debug_assert!(index != self.pos);
1442 debug_assert!(index < self.data.len());
1443 unsafe {
1444 let ptr = self.data.as_mut_ptr();
1445 let index_ptr: *const _ = ptr.add(index);
1446 let hole_ptr = ptr.add(self.pos);
1447 ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
1448 }
1449 self.pos = index;
1450 }
1451}
1452
1453impl<T> Drop for Hole<'_, T> {
1454 #[inline]
1455 fn drop(&mut self) {
1456 // fill the hole again
1457 unsafe {
1458 let pos = self.pos;
1459 ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
1460 }
1461 }
1462}
1463
1464/// An iterator over the elements of a `BinaryHeap`.
1465///
1466/// This `struct` is created by [`BinaryHeap::iter()`]. See its
1467/// documentation for more.
1468#[must_use = "iterators are lazy and do nothing unless consumed"]
1469// #[stable(feature = "rust1", since = "1.0.0")]
1470pub struct Iter<'a, T: 'a> {
1471 iter: slice::Iter<'a, T>,
1472}
1473
1474// #[stable(feature = "collection_debug", since = "1.17.0")]
1475impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
1476 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1477 f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
1478 }
1479}
1480
1481// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
1482// #[stable(feature = "rust1", since = "1.0.0")]
1483impl<T> Clone for Iter<'_, T> {
1484 fn clone(&self) -> Self {
1485 Iter {
1486 iter: self.iter.clone(),
1487 }
1488 }
1489}
1490
1491// #[stable(feature = "rust1", since = "1.0.0")]
1492impl<'a, T> Iterator for Iter<'a, T> {
1493 type Item = &'a T;
1494
1495 #[inline]
1496 fn next(&mut self) -> Option<&'a T> {
1497 self.iter.next()
1498 }
1499
1500 #[inline]
1501 fn size_hint(&self) -> (usize, Option<usize>) {
1502 self.iter.size_hint()
1503 }
1504
1505 #[inline]
1506 fn last(self) -> Option<&'a T> {
1507 self.iter.last()
1508 }
1509}
1510
1511// #[stable(feature = "rust1", since = "1.0.0")]
1512impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
1513 #[inline]
1514 fn next_back(&mut self) -> Option<&'a T> {
1515 self.iter.next_back()
1516 }
1517}
1518
1519// #[stable(feature = "rust1", since = "1.0.0")]
1520// impl<'a, T> ExactSizeIterator for Iter<'a, T> {
1521// fn is_empty(&self) -> bool {
1522// self.iter.is_empty()
1523// }
1524// }
1525
1526// #[stable(feature = "fused", since = "1.26.0")]
1527// impl<'a, T> FusedIterator for Iter<'a, T> {}
1528
1529/// An owning iterator over the elements of a `BinaryHeap`.
1530///
1531/// This `struct` is created by [`BinaryHeap::into_iter()`]
1532/// (provided by the [`IntoIterator`] trait). See its documentation for more.
1533///
1534/// [`IntoIterator`]: https://doc.rust-lang.org/stable/core/iter/trait.IntoIterator.html
1535// #[stable(feature = "rust1", since = "1.0.0")]
1536#[derive(Clone)]
1537pub struct IntoIter<T> {
1538 iter: vec::IntoIter<T>,
1539}
1540
1541// #[stable(feature = "collection_debug", since = "1.17.0")]
1542impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
1543 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
1544 f.debug_tuple("IntoIter")
1545 .field(&self.iter.as_slice())
1546 .finish()
1547 }
1548}
1549
1550// #[stable(feature = "rust1", since = "1.0.0")]
1551impl<T> Iterator for IntoIter<T> {
1552 type Item = T;
1553
1554 #[inline]
1555 fn next(&mut self) -> Option<T> {
1556 self.iter.next()
1557 }
1558
1559 #[inline]
1560 fn size_hint(&self) -> (usize, Option<usize>) {
1561 self.iter.size_hint()
1562 }
1563}
1564
1565// #[stable(feature = "rust1", since = "1.0.0")]
1566impl<T> DoubleEndedIterator for IntoIter<T> {
1567 #[inline]
1568 fn next_back(&mut self) -> Option<T> {
1569 self.iter.next_back()
1570 }
1571}
1572// #[stable(feature = "rust1", since = "1.0.0")]
1573// impl<T> ExactSizeIterator for IntoIter<T> {
1574// fn is_empty(&self) -> bool {
1575// self.iter.is_empty()
1576// }
1577// }
1578
1579// #[stable(feature = "fused", since = "1.26.0")]
1580// impl<T> FusedIterator for IntoIter<T> {}
1581
1582#[must_use = "iterators are lazy and do nothing unless consumed"]
1583// #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1584#[derive(Clone, Debug)]
1585pub struct IntoIterSorted<T, C> {
1586 inner: BinaryHeap<T, C>,
1587}
1588
1589// #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1590impl<T, C: Compare<T>> Iterator for IntoIterSorted<T, C> {
1591 type Item = T;
1592
1593 #[inline]
1594 fn next(&mut self) -> Option<T> {
1595 self.inner.pop()
1596 }
1597
1598 #[inline]
1599 fn size_hint(&self) -> (usize, Option<usize>) {
1600 let exact = self.inner.len();
1601 (exact, Some(exact))
1602 }
1603}
1604
1605/// A draining iterator over the elements of a `BinaryHeap`.
1606///
1607/// This `struct` is created by [`BinaryHeap::drain()`]. See its
1608/// documentation for more.
1609// #[stable(feature = "drain", since = "1.6.0")]
1610#[derive(Debug)]
1611pub struct Drain<'a, T: 'a> {
1612 iter: vec::Drain<'a, T>,
1613}
1614
1615// #[stable(feature = "drain", since = "1.6.0")]
1616impl<T> Iterator for Drain<'_, T> {
1617 type Item = T;
1618
1619 #[inline]
1620 fn next(&mut self) -> Option<T> {
1621 self.iter.next()
1622 }
1623
1624 #[inline]
1625 fn size_hint(&self) -> (usize, Option<usize>) {
1626 self.iter.size_hint()
1627 }
1628}
1629
1630// #[stable(feature = "drain", since = "1.6.0")]
1631impl<T> DoubleEndedIterator for Drain<'_, T> {
1632 #[inline]
1633 fn next_back(&mut self) -> Option<T> {
1634 self.iter.next_back()
1635 }
1636}
1637
1638// #[stable(feature = "drain", since = "1.6.0")]
1639// impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {
1640// fn is_empty(&self) -> bool {
1641// self.iter.is_empty()
1642// }
1643// }
1644
1645// #[stable(feature = "fused", since = "1.26.0")]
1646// impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
1647
1648// #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1649impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
1650 /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
1651 ///
1652 /// This conversion happens in-place, and has *O*(*n*) time complexity.
1653 fn from(vec: Vec<T>) -> Self {
1654 BinaryHeap::from_vec(vec)
1655 }
1656}
1657
1658impl<T: Ord, const N: usize> From<[T; N]> for BinaryHeap<T> {
1659 /// ```
1660 /// use binary_heap_plus::BinaryHeap;
1661 ///
1662 /// let mut h1 = BinaryHeap::from([1, 4, 2, 3]);
1663 /// let mut h2: BinaryHeap<_> = [1, 4, 2, 3].into();
1664 /// while let Some((a, b)) = h1.pop().zip(h2.pop()) {
1665 /// assert_eq!(a, b);
1666 /// }
1667 /// ```
1668 fn from(arr: [T; N]) -> Self {
1669 Self::from_iter(arr)
1670 }
1671}
1672
1673impl<T, C> From<BinaryHeap<T, C>> for Vec<T> {
1674 /// Converts a `BinaryHeap<T>` into a `Vec<T>`.
1675 ///
1676 /// This conversion requires no data movement or allocation, and has
1677 /// constant time complexity.
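    ///
    /// A brief sketch; the returned vector is in arbitrary (heap) order:
    ///
    /// ```
    /// use binary_heap_plus::BinaryHeap;
    ///
    /// let heap = BinaryHeap::from(vec![1, 5, 2]);
    /// let vec: Vec<i32> = heap.into();
    /// assert_eq!(vec.len(), 3);
    /// ```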
1678 fn from(heap: BinaryHeap<T, C>) -> Vec<T> {
1679 heap.data
1680 }
1681}
1682
1683// #[stable(feature = "rust1", since = "1.0.0")]
1684impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
1685 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1686 BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
1687 }
1688}
1689
1690// #[stable(feature = "rust1", since = "1.0.0")]
1691impl<T, C> IntoIterator for BinaryHeap<T, C> {
1692 type Item = T;
1693 type IntoIter = IntoIter<T>;
1694
1695 /// Creates a consuming iterator, that is, one that moves each value out of
1696 /// the binary heap in arbitrary order. The binary heap cannot be used
1697 /// after calling this.
1698 ///
1699 /// # Examples
1700 ///
1701 /// Basic usage:
1702 ///
1703 /// ```
1704 /// use binary_heap_plus::BinaryHeap;
1705 /// let heap = BinaryHeap::from([1, 2, 3, 4]);
1706 ///
1707 /// // Print 1, 2, 3, 4 in arbitrary order
1708 /// for x in heap.into_iter() {
1709 /// // x has type i32, not &i32
1710 /// println!("{}", x);
1711 /// }
1712 /// ```
1713 fn into_iter(self) -> IntoIter<T> {
1714 IntoIter {
1715 iter: self.data.into_iter(),
1716 }
1717 }
1718}
1719
1720// #[stable(feature = "rust1", since = "1.0.0")]
1721impl<'a, T, C> IntoIterator for &'a BinaryHeap<T, C> {
1722 type Item = &'a T;
1723 type IntoIter = Iter<'a, T>;
1724
1725 fn into_iter(self) -> Iter<'a, T> {
1726 self.iter()
1727 }
1728}
1729
1730// #[stable(feature = "rust1", since = "1.0.0")]
1731impl<T, C: Compare<T>> Extend<T> for BinaryHeap<T, C> {
1732 #[inline]
1733 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1734 // <Self as SpecExtend<I>>::spec_extend(self, iter);
1735 self.extend_desugared(iter);
1736 }
1737}
1738
1739// impl<T, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
1740// default fn spec_extend(&mut self, iter: I) {
1741// self.extend_desugared(iter.into_iter());
1742// }
1743// }
1744
1745// impl<T> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
1746// fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
1747// self.append(other);
1748// }
1749// }
1750
1751impl<T, C: Compare<T>> BinaryHeap<T, C> {
1752 fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1753 let iterator = iter.into_iter();
1754 let (lower, _) = iterator.size_hint();
1755
1756 self.reserve(lower);
1757
1758 iterator.for_each(move |elem| self.push(elem));
1759 }
1760}
1761
1762// #[stable(feature = "extend_ref", since = "1.2.0")]
1763impl<'a, T: 'a + Copy, C: Compare<T>> Extend<&'a T> for BinaryHeap<T, C> {
1764 fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
1765 self.extend(iter.into_iter().cloned());
1766 }
1767}
1768
1769// #[unstable(feature = "collection_placement",
1770// reason = "placement protocol is subject to change",
1771// issue = "30172")]
1772// pub struct BinaryHeapPlace<'a, T: 'a>
1773// where T: Clone {
1774// heap: *mut BinaryHeap<T>,
1775// place: vec::PlaceBack<'a, T>,
1776// }
1777
1778// #[unstable(feature = "collection_placement",
1779// reason = "placement protocol is subject to change",
1780// issue = "30172")]
1781// impl<'a, T: Clone + Ord + fmt::Debug> fmt::Debug for BinaryHeapPlace<'a, T> {
1782// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
1783// f.debug_tuple("BinaryHeapPlace")
1784// .field(&self.place)
1785// .finish()
1786// }
1787// }
1788
1789// #[unstable(feature = "collection_placement",
1790// reason = "placement protocol is subject to change",
1791// issue = "30172")]
1792// impl<'a, T: 'a> Placer<T> for &'a mut BinaryHeap<T>
1793// where T: Clone + Ord {
1794// type Place = BinaryHeapPlace<'a, T>;
1795
1796// fn make_place(self) -> Self::Place {
1797// let ptr = self as *mut BinaryHeap<T>;
1798// let place = Placer::make_place(self.data.place_back());
1799// BinaryHeapPlace {
1800// heap: ptr,
1801// place,
1802// }
1803// }
1804// }
1805
1806// #[unstable(feature = "collection_placement",
1807// reason = "placement protocol is subject to change",
1808// issue = "30172")]
1809// unsafe impl<'a, T> Place<T> for BinaryHeapPlace<'a, T>
1810// where T: Clone + Ord {
1811// fn pointer(&mut self) -> *mut T {
1812// self.place.pointer()
1813// }
1814// }
1815
1816// #[unstable(feature = "collection_placement",
1817// reason = "placement protocol is subject to change",
1818// issue = "30172")]
1819// impl<'a, T> InPlace<T> for BinaryHeapPlace<'a, T>
1820// where T: Clone + Ord {
1821// type Owner = &'a T;
1822
1823// unsafe fn finalize(self) -> &'a T {
1824// self.place.finalize();
1825
1826// let heap: &mut BinaryHeap<T> = &mut *self.heap;
1827// let len = heap.len();
1828// let i = heap.sift_up(0, len - 1);
1829// heap.data.get_unchecked(i)
1830// }
1831// }