mut_binary_heap/binary_heap.rs
1#![deny(unsafe_op_in_unsafe_fn)]
2// #![stable(feature = "rust1", since = "1.0.0")]
3
4use std::cmp::{min, Ordering};
5use std::collections::HashMap;
6use std::hash::Hash;
7use std::marker::PhantomData;
8// use std::iter::FusedIterator;
9// use std::vec::Drain;
10use compare::Compare;
11use core::fmt;
12use core::mem::{swap, ManuallyDrop};
13use core::ptr;
14#[cfg(feature = "serde")]
15use serde::{
16 de::{self, MapAccess, SeqAccess, Visitor},
17 ser::SerializeStruct,
18 Deserialize, Deserializer, Serialize, Serializer,
19};
20use std::ops::Deref;
21use std::ops::DerefMut;
22use std::vec;
23
24// use super::SpecExtend;
25
26/// A priority queue implemented with a binary heap storing key-value pairs.
27///
28/// Unlike the [BinaryHeap](std::collections::BinaryHeap) implementation in the
29/// standard library, this heap allows values to be modified while they are stored
30/// in it, via the [`BinaryHeap::get_mut()`] method. Updating a value through `RefCell`,
31/// global state, or similar will, however, still result in an invalid heap, because
32/// the heap is not updated automatically.
33///
34/// # Examples
35///
36/// ```
37/// use mut_binary_heap::BinaryHeap;
38///
39/// // Type inference lets us omit an explicit type signature (which
40/// // would be `BinaryHeap<i32, i32, MaxComparator>` in this example).
41/// let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
42///
43/// // We can use peek to look at the next item in the heap. In this case,
44/// // there are no items in it yet, so we get None.
45/// assert_eq!(heap.peek(), None);
46///
47/// // Let's add some scores...
48/// heap.push(1, 1);
49/// heap.push(2, 5);
50/// heap.push(3, 2);
51///
52/// // Now peek shows the most important item in the heap.
53/// assert_eq!(heap.peek(), Some(&5));
54///
55/// // We can check the length of a heap.
56/// assert_eq!(heap.len(), 3);
57///
58/// // We can iterate over the items in the heap, although they are returned in
59/// // a random order.
60/// for x in &heap {
61/// println!("key {}, value {}", x.0, x.1);
62/// }
63///
64/// // If we instead pop these scores, they should come back in order.
65/// assert_eq!(heap.pop(), Some(5));
66/// assert_eq!(heap.pop(), Some(2));
67/// assert_eq!(heap.pop(), Some(1));
68/// assert_eq!(heap.pop(), None);
69///
70/// // We can clear the heap of any remaining items.
71/// heap.clear();
72///
73/// // The heap should now be empty.
74/// assert!(heap.is_empty())
75/// ```
76///
77/// A `BinaryHeap` with a known list of items can be initialized from an iterator
78/// and a key selection function.
79///
80/// ```
81/// use mut_binary_heap::BinaryHeap;
82///
83/// // This will create a max-heap.
84/// let heap: BinaryHeap<_, _> = BinaryHeap::from([1, 5, 2].iter(), |v| v.clone());
85/// ```
86///
87/// ## Min-heap
88///
89/// `BinaryHeap` can also act as a min-heap without requiring [`Reverse`] or a custom [`Ord`]
90/// implementation.
91///
92/// ```
93/// use mut_binary_heap::BinaryHeap;
94///
95/// let mut heap = BinaryHeap::new_min();
96///
97/// // There is no need to wrap values in `Reverse`
98/// heap.push(1, 1);
99/// heap.push(2, 5);
100/// heap.push(3, 2);
101///
102/// // If we pop these scores now, they should come back in the reverse order.
103/// assert_eq!(heap.pop(), Some(1));
104/// assert_eq!(heap.pop(), Some(2));
105/// assert_eq!(heap.pop(), Some(5));
106/// assert_eq!(heap.pop(), None);
107/// ```
108///
109/// # Time complexity
110///
111/// | method | cost |
112/// |--------------------|----------------|
113/// | [push] | *O*(1)~ |
114/// | [pop] | *O*(log(*n*)) |
115/// | [peek]/[peek\_mut] | *O*(1) |
116/// | [get] | *O*(1) |
117/// | [get\_mut] | *O*(log(*n*)) |
118/// | [contains\_key] | *O*(1) |
119///
120/// The value for `push` is an expected cost; the method documentation gives a
121/// more detailed analysis.
122/// The cost for `get_mut` includes the cost of dropping the `RefMut` returned
123/// by the function; obtaining access itself is *O*(1).
124///
125/// [`Reverse`]: https://doc.rust-lang.org/stable/core/cmp/struct.Reverse.html
126/// [`Ord`]: https://doc.rust-lang.org/stable/core/cmp/trait.Ord.html
127/// [`Cell`]: https://doc.rust-lang.org/stable/core/cell/struct.Cell.html
128/// [`RefCell`]: https://doc.rust-lang.org/stable/core/cell/struct.RefCell.html
129/// [push]: BinaryHeap::push
130/// [pop]: BinaryHeap::pop
131/// [peek]: BinaryHeap::peek
132/// [peek\_mut]: BinaryHeap::peek_mut
133/// [get]: BinaryHeap::get
134/// [get\_mut]: BinaryHeap::get_mut
135/// [contains\_key]: BinaryHeap::contains_key
136// #[stable(feature = "rust1", since = "1.0.0")]
137pub struct BinaryHeap<K, T, C = MaxComparator> {
138 data: Vec<(K, T)>,
139 cmp: C,
140 keys: HashMap<K, usize>,
141 _not_sync: PhantomData<std::cell::Cell<()>>,
142}
143
144/// For `T` that implements `Ord`, you can use this struct to quickly
145/// set up a max heap.
146#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
147#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
148pub struct MaxComparator;
149
150impl<T: Ord> Compare<T> for MaxComparator {
151 fn compare(&self, a: &T, b: &T) -> Ordering {
152 a.cmp(b)
153 }
154}
155
156/// For `T` that implements `Ord`, you can use this struct to quickly
157/// set up a min heap.
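///
/// A minimal sketch of naming the comparator explicitly as the heap's third type
/// parameter (this assumes `MinComparator` is re-exported at the crate root, like
/// the `MaxComparator` mentioned in the type-level docs):
///
/// ```
/// use mut_binary_heap::{BinaryHeap, MinComparator};
///
/// let mut heap: BinaryHeap<i32, i32, MinComparator> = BinaryHeap::new();
/// heap.push(0, 3);
/// heap.push(1, 1);
/// assert_eq!(heap.pop(), Some(1));
/// ```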
158#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
159#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
160pub struct MinComparator;
161
162impl<T: Ord> Compare<T> for MinComparator {
163 fn compare(&self, a: &T, b: &T) -> Ordering {
164 b.cmp(a)
165 }
166}
167
168/// The comparator defined by closure
169#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
170#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
171pub struct FnComparator<F>(pub F);
172
173impl<T, F> Compare<T> for FnComparator<F>
174where
175 F: Fn(&T, &T) -> Ordering,
176{
177 fn compare(&self, a: &T, b: &T) -> Ordering {
178 self.0(a, b)
179 }
180}
181
182/// The comparator ordered by key
183#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
184#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
185pub struct KeyComparator<F>(pub F);
186
187impl<K: Ord, T, F> Compare<T> for KeyComparator<F>
188where
189 F: Fn(&T) -> K,
190{
191 fn compare(&self, a: &T, b: &T) -> Ordering {
192 self.0(a).cmp(&self.0(b))
193 }
194}
195
196/// Structure wrapping a mutable reference to the first item on a
197/// `BinaryHeap`.
198///
199/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
200/// its documentation for more.
201///
202/// [`peek_mut`]: BinaryHeap::peek_mut
203pub struct PeekMut<'a, K: Hash + Eq, T: 'a, C: 'a + Compare<T>> {
204 heap: &'a mut BinaryHeap<K, T, C>,
205 sift: bool,
206}
207
208impl<K: fmt::Debug + Hash + Eq, T: fmt::Debug, C: Compare<T>> fmt::Debug for PeekMut<'_, K, T, C> {
209 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
210 f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
211 }
212}
213
214impl<K: Hash + Eq, T, C: Compare<T>> Drop for PeekMut<'_, K, T, C> {
215 fn drop(&mut self) {
216 if self.sift {
217 // SAFETY: PeekMut is only instantiated for non-empty heaps.
218 unsafe { self.heap.sift_down(0) };
219 }
220 }
221}
222
223impl<K: Hash + Eq, T, C: Compare<T>> Deref for PeekMut<'_, K, T, C> {
224 type Target = T;
225 fn deref(&self) -> &T {
226 self.key_value().1
227 }
228}
229
230impl<K: Hash + Eq, T, C: Compare<T>> DerefMut for PeekMut<'_, K, T, C> {
231 fn deref_mut(&mut self) -> &mut T {
232 self.key_value_mut().1
233 }
234}
235
236impl<K: Hash + Eq, T, C: Compare<T>> PeekMut<'_, K, T, C> {
237 /// returns the key of the first item on the heap.
238 pub fn key(&self) -> &K {
239 debug_assert!(!self.heap.is_empty());
240 // SAFE: PeekMut is only instantiated for non-empty heaps
241 let key_value = unsafe { self.heap.data.get_unchecked(0) };
242 &key_value.0
243 }
244
245 /// returns the key-value pair that is the first item on the heap.
246 pub fn key_value(&self) -> (&K, &T) {
247 debug_assert!(!self.heap.is_empty());
248 // SAFE: PeekMut is only instantiated for non-empty heaps
249 let key_value = unsafe { self.heap.data.get_unchecked(0) };
250 (&key_value.0, &key_value.1)
251 }
252
253 /// returns a mutable reference to the key-value pair that is the first item on the heap.
254 pub fn key_value_mut(&mut self) -> (&mut K, &mut T) {
255 debug_assert!(!self.heap.is_empty());
256 self.sift = true;
257 // SAFE: PeekMut is only instantiated for non-empty heaps
258 let key_value = unsafe { self.heap.data.get_unchecked_mut(0) };
259 (&mut key_value.0, &mut key_value.1)
260 }
261
262 /// Removes the peeked value from the heap and returns it.
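    ///
    /// A minimal usage sketch, using only APIs defined in this module:
    ///
    /// ```
    /// use mut_binary_heap::BinaryHeap;
    /// let mut heap: BinaryHeap<i32, i32> = BinaryHeap::new();
    /// heap.push(0, 3);
    /// heap.push(1, 5);
    /// assert_eq!(heap.peek_mut().unwrap().pop(), 5);
    /// assert_eq!(heap.len(), 1);
    /// ```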
263 pub fn pop(mut self) -> T {
264 let value = self.heap.pop().unwrap();
265 self.sift = false;
266 value
267 }
268
269 /// Removes the peeked value from the heap and returns it as a key-value pair.
270 pub fn pop_with_key(mut self) -> (K, T) {
271 let key_value = self.heap.pop_with_key().unwrap();
272 self.sift = false;
273 key_value
274 }
275}
276
277/// Structure wrapping a mutable reference to any item on a `BinaryHeap`.
278///
279/// This `struct` is created by the [`get_mut`] method on [`BinaryHeap`]. See
280/// its documentation for more.
281///
282/// [`get_mut`]: BinaryHeap::get_mut
283pub struct RefMut<'a, K: 'a + Hash + Eq, T: 'a, C: 'a + Compare<T>> {
284 heap: &'a mut BinaryHeap<K, T, C>,
285 pos: usize,
286 key: &'a K,
287}
288
289impl<K: fmt::Debug + Hash + Eq, T: fmt::Debug, C: Compare<T>> fmt::Debug for RefMut<'_, K, T, C> {
290 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
291 f.debug_tuple("RefMut")
292 .field(&self.key)
293 .field(&self.heap.data.get(self.pos))
294 .finish()
295 }
296}
297
298impl<K: Hash + Eq, T, C: Compare<T>> Drop for RefMut<'_, K, T, C> {
299 fn drop(&mut self) {
300 self.heap.update(self.key);
301 }
302}
303
304impl<K: Hash + Eq, T, C: Compare<T>> Deref for RefMut<'_, K, T, C> {
305 type Target = T;
306 fn deref(&self) -> &T {
307 &self.heap.data[self.pos].1
308 }
309}
310
311impl<K: Hash + Eq, T, C: Compare<T>> DerefMut for RefMut<'_, K, T, C> {
312 fn deref_mut(&mut self) -> &mut Self::Target {
313 &mut self.heap.data[self.pos].1
314 }
315}
316
317impl<K: Hash + Eq, T, C: Compare<T>> RefMut<'_, K, T, C> {
318 /// returns the key of the heap item.
319 pub fn key(&self) -> &K {
320 self.key
321 }
322
323 /// returns a key-value pair for this heap item.
324 pub fn key_value(&self) -> (&K, &T) {
325 (self.key, self)
326 }
327
328 /// returns a mutable key-value pair for this heap item.
329 /// Modifying the key is not possible; only the value is mutable.
330 pub fn key_value_mut(&mut self) -> (&K, &mut T) {
331 (self.key, self)
332 }
333}
334
335impl<K: Clone, T: Clone, C: Clone> Clone for BinaryHeap<K, T, C> {
336 fn clone(&self) -> Self {
337 BinaryHeap {
338 data: self.data.clone(),
339 cmp: self.cmp.clone(),
340 keys: self.keys.clone(),
341 _not_sync: PhantomData::default(),
342 }
343 }
344
345 fn clone_from(&mut self, source: &Self) {
346 // TODO unit test
347 self.data.clone_from(&source.data);
348 self.keys.clone_from(&source.keys);
349 self.cmp = source.cmp.clone();
350 }
351}
352
353impl<K: Hash + Eq, T, C: Compare<T> + Default> Default for BinaryHeap<K, T, C> {
354 /// Creates an empty `BinaryHeap<K, T>`.
355 #[inline]
356 fn default() -> BinaryHeap<K, T, C> {
357 BinaryHeap::new()
358 }
359}
360
361impl<K: fmt::Debug, T: fmt::Debug, C> fmt::Debug for BinaryHeap<K, T, C> {
362 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
363 f.debug_list().entries(self.iter()).finish()
364 }
365}
366
367impl<K: Hash + Eq, T, C: Compare<T> + Default> BinaryHeap<K, T, C> {
368 /// Creates an empty `BinaryHeap`.
369 ///
370 /// This default version will create a max-heap.
371 ///
372 /// # Examples
373 ///
374 /// Basic usage:
375 ///
376 /// ```
377 /// use mut_binary_heap::BinaryHeap;
378 /// let mut heap: BinaryHeap<i32, i32> = BinaryHeap::new();
379 /// heap.push(0, 3);
380 /// heap.push(1, 1);
381 /// heap.push(2, 5);
382 /// assert_eq!(heap.pop(), Some(5));
383 /// ```
384 // #[stable(feature = "rust1", since = "1.0.0")]
385 #[must_use]
386 pub fn new() -> Self {
387 unsafe { BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), C::default(), false) }
388 }
389
390 /// Creates an empty `BinaryHeap` with a specific capacity.
391 /// This preallocates enough memory for `capacity` elements,
392 /// so that the `BinaryHeap` does not have to be reallocated
393 /// until it contains at least that many values.
394 ///
395 /// This default version will create a max-heap.
396 ///
397 /// # Examples
398 ///
399 /// Basic usage:
400 ///
401 /// ```
402 /// use mut_binary_heap::BinaryHeap;
403 /// let mut heap: BinaryHeap<i32, i32> = BinaryHeap::with_capacity(10);
404 /// assert!(heap.capacity_min() >= 10);
405 /// heap.push(0, 3);
406 /// heap.push(1, 1);
407 /// heap.push(2, 5);
408 /// assert_eq!(heap.pop(), Some(5));
409 /// ```
410 // #[stable(feature = "rust1", since = "1.0.0")]
411 #[must_use]
412 pub fn with_capacity(capacity: usize) -> Self {
413 unsafe {
414 BinaryHeap::new_from_data_raw(
415 Vec::with_capacity(capacity),
416 HashMap::with_capacity(capacity),
417 C::default(),
418 false,
419 )
420 }
421 }
422}
423
424impl<K: Hash + Eq + Clone, T, C: Compare<T> + Default> BinaryHeap<K, T, C> {
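    /// Creates a `BinaryHeap` from an iterator of values and a key-selection
    /// function that computes the key stored alongside each value.
    ///
    /// A short sketch, mirroring the crate-level examples:
    ///
    /// ```
    /// use mut_binary_heap::BinaryHeap;
    /// let heap = BinaryHeap::<_, _>::from([1, 5, 2], |v| v.clone());
    ///
    /// assert_eq!(heap.len(), 3);
    /// assert_eq!(heap.peek(), Some(&5));
    /// ```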
425 pub fn from<I: IntoIterator<Item = T>, F: Fn(&T) -> K>(values: I, key_selector: F) -> Self {
426 values
427 .into_iter()
428 .map(|value| (key_selector(&value), value))
429 .collect()
430 }
431}
432
433impl<K: Hash + Eq, T, C: Compare<T>> BinaryHeap<K, T, C> {
434 /// Creates a new Binary Heap from a vec and hashmap.
435 ///
436 /// # Safety
437 ///
438 /// The caller is responsible for passing a valid combination of `data`,
439 /// `keys`, and `rebuild`:
440 /// * `data`: there must not be any duplicate keys in `data`
441 /// * `keys`: for each key in `data` there must be an entry in `keys` holding
442 /// that key's index into `data`
443 /// * `rebuild`: must be `true` if `data` is not in valid heap order based on `cmp`
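    ///
    /// A minimal sketch of a valid call (this assumes `MaxComparator` is exported from
    /// the crate root, as the type-level docs suggest): every key is unique and maps to
    /// its index in `data`, and `rebuild` is `true` because the vec is not heap-ordered.
    ///
    /// ```
    /// use std::collections::HashMap;
    /// use mut_binary_heap::{BinaryHeap, MaxComparator};
    ///
    /// let data = vec![(0, 1), (1, 5), (2, 2)];
    /// let keys: HashMap<i32, usize> =
    ///     data.iter().enumerate().map(|(i, kv)| (kv.0, i)).collect();
    /// let mut heap = unsafe { BinaryHeap::new_from_data_raw(data, keys, MaxComparator, true) };
    /// assert_eq!(heap.pop(), Some(5));
    /// ```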
444 #[must_use]
445 #[doc(hidden)]
446 pub unsafe fn new_from_data_raw(
447 data: Vec<(K, T)>,
448 keys: HashMap<K, usize>,
449 cmp: C,
450 rebuild: bool,
451 ) -> Self {
452 let mut heap = BinaryHeap {
453 data,
454 cmp,
455 keys,
456 _not_sync: PhantomData::default(),
457 };
458 debug_assert!(heap.data.len() == heap.keys.len());
459 if rebuild && !heap.data.is_empty() {
460 heap.rebuild();
461 }
462 heap
463 }
464}
465
466impl<K: Hash + Eq, T: Ord> BinaryHeap<K, T, MinComparator> {
467 /// Creates an empty `BinaryHeap`.
468 ///
469 /// The `_min()` version will create a min-heap.
470 ///
471 /// # Examples
472 ///
473 /// Basic usage:
474 ///
475 /// ```
476 /// use mut_binary_heap::BinaryHeap;
477 /// let mut heap = BinaryHeap::new_min();
478 /// heap.push(0, 3);
479 /// heap.push(1, 1);
480 /// heap.push(2, 5);
481 /// assert_eq!(heap.pop(), Some(1));
482 /// ```
483 #[must_use]
484 pub fn new_min() -> Self {
485 unsafe { BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), MinComparator, false) }
486 }
487
488 /// Creates an empty `BinaryHeap` with a specific capacity.
489 /// This preallocates enough memory for `capacity` elements,
490 /// so that the `BinaryHeap` does not have to be reallocated
491 /// until it contains at least that many values.
492 ///
493 /// The `_min()` version will create a min-heap.
494 ///
495 /// # Examples
496 ///
497 /// Basic usage:
498 ///
499 /// ```
500 /// use mut_binary_heap::BinaryHeap;
501 /// let mut heap = BinaryHeap::with_capacity_min(10);
502 /// assert!(heap.capacity_min() >= 10);
503 /// heap.push(0, 3);
504 /// heap.push(1, 1);
505 /// heap.push(2, 5);
506 /// assert_eq!(heap.pop(), Some(1));
507 /// ```
508 #[must_use]
509 pub fn with_capacity_min(capacity: usize) -> Self {
510 unsafe {
511 BinaryHeap::new_from_data_raw(
512 Vec::with_capacity(capacity),
513 HashMap::with_capacity(capacity),
514 MinComparator,
515 false,
516 )
517 }
518 }
519}
520
521impl<K: Hash + Eq, T, F> BinaryHeap<K, T, FnComparator<F>>
522where
523 F: Fn(&T, &T) -> Ordering,
524{
525 /// Creates an empty `BinaryHeap`.
526 ///
527 /// The `_by()` version will create a heap ordered by the given closure.
528 ///
529 /// # Examples
530 ///
531 /// Basic usage:
532 ///
533 /// ```
534 /// use mut_binary_heap::BinaryHeap;
535 /// let mut heap = BinaryHeap::new_by(|a: &i32, b: &i32| b.cmp(a));
536 /// heap.push(0, 3);
537 /// heap.push(1, 1);
538 /// heap.push(2, 5);
539 /// assert_eq!(heap.pop(), Some(1));
540 /// ```
541 #[must_use]
542 pub fn new_by(f: F) -> Self {
543 unsafe { BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), FnComparator(f), false) }
544 }
545
546 /// Creates an empty `BinaryHeap` with a specific capacity.
547 /// This preallocates enough memory for `capacity` elements,
548 /// so that the `BinaryHeap` does not have to be reallocated
549 /// until it contains at least that many values.
550 ///
551 /// The `_by()` version will create a heap ordered by the given closure.
552 ///
553 /// # Examples
554 ///
555 /// Basic usage:
556 ///
557 /// ```
558 /// use mut_binary_heap::BinaryHeap;
559 /// let mut heap = BinaryHeap::with_capacity_by(10, |a: &i32, b: &i32| b.cmp(a));
560 /// assert!(heap.capacity_min() >= 10);
561 /// heap.push(0, 3);
562 /// heap.push(1, 1);
563 /// heap.push(2, 5);
564 /// assert_eq!(heap.pop(), Some(1));
565 /// ```
566 #[must_use]
567 pub fn with_capacity_by(capacity: usize, f: F) -> Self {
568 unsafe {
569 BinaryHeap::new_from_data_raw(
570 Vec::with_capacity(capacity),
571 HashMap::with_capacity(capacity),
572 FnComparator(f),
573 false,
574 )
575 }
576 }
577}
578
579impl<K: Hash + Eq, T, F, C: Ord> BinaryHeap<K, T, KeyComparator<F>>
580where
581 F: Fn(&T) -> C,
582{
583 /// Creates an empty `BinaryHeap`.
584 ///
585 /// The `_by_sort_key()` version will create a heap ordered by
586 /// the sort key produced by the given closure.
587 ///
588 /// # Examples
589 ///
590 /// Basic usage:
591 ///
592 /// ```
593 /// use mut_binary_heap::BinaryHeap;
594 /// let mut heap = BinaryHeap::new_by_sort_key(|a: &i32| a % 4);
595 /// heap.push(0, 3);
596 /// heap.push(1, 1);
597 /// heap.push(2, 5);
598 /// assert_eq!(heap.pop(), Some(3));
599 /// ```
600 #[must_use]
601 pub fn new_by_sort_key(f: F) -> Self {
602 unsafe {
603 BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), KeyComparator(f), false)
604 }
605 }
606
607 /// Creates an empty `BinaryHeap` with a specific capacity.
608 /// This preallocates enough memory for `capacity` elements,
609 /// so that the `BinaryHeap` does not have to be reallocated
610 /// until it contains at least that many values.
611 ///
612 /// The `_by_sort_key()` version will create a heap ordered by
613 /// the sort key produced by the given closure.
614 ///
615 /// # Examples
616 ///
617 /// Basic usage:
618 ///
619 /// ```
620 /// use mut_binary_heap::BinaryHeap;
621 /// let mut heap = BinaryHeap::with_capacity_by_sort_key(10, |a: &i32| a % 4);
622 /// assert!(heap.capacity_min() >= 10);
623 /// heap.push(0, 3);
624 /// heap.push(1, 1);
625 /// heap.push(2, 5);
626 /// assert_eq!(heap.pop(), Some(3));
627 /// ```
628 #[must_use]
629 pub fn with_capacity_by_sort_key(capacity: usize, f: F) -> Self {
630 unsafe {
631 BinaryHeap::new_from_data_raw(
632 Vec::with_capacity(capacity),
633 HashMap::with_capacity(capacity),
634 KeyComparator(f),
635 false,
636 )
637 }
638 }
639}
640
641impl<K: Hash + Eq + Clone, T, C: Compare<T>> BinaryHeap<K, T, C> {
642 /**
643 Pushes an item onto the binary heap.
644
645 If the heap did not have this key present, [None] is returned.
646
647 If the heap did have this key present, the value is updated, and the old
648 value is returned. The key is not updated, though; this matters for
649 types that can be `==` without being identical. For more information see
650 the documentation of [HashMap::insert].
651
652 # Examples
653
654 Basic usage:
655
656 ```
657 use mut_binary_heap::BinaryHeap;
658 let mut heap: BinaryHeap<i32, i32> = BinaryHeap::new();
659 heap.push(0, 3);
660 heap.push(1, 5);
661 heap.push(2, 1);
662
663 assert_eq!(heap.len(), 3);
664 assert_eq!(heap.peek(), Some(&5));
665 ```
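
    A sketch of the update path: pushing an existing key replaces its value and
    returns the old one.

    ```
    use mut_binary_heap::BinaryHeap;
    let mut heap: BinaryHeap<i32, i32> = BinaryHeap::new();
    assert_eq!(heap.push(0, 3), None);
    assert_eq!(heap.push(0, 7), Some(3));
    assert_eq!(heap.peek(), Some(&7));
    ```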
666
667 # Time complexity
668
669 The expected cost of `push`, averaged over every possible ordering of
670 the elements being pushed, and over a sufficiently large number of
671 pushes, is *O*(1). This is the most meaningful cost metric when pushing
672 elements that are *not* already in any sorted pattern.
673
674 The time complexity degrades if elements are pushed in predominantly
675 ascending order. In the worst case, elements are pushed in ascending
676 sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
677 containing *n* elements.
678
679 The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
680 occurs when capacity is exhausted and needs a resize. The resize cost
681 has been amortized in the previous figures.
682 */
683 pub fn push(&mut self, key: K, item: T) -> Option<T> {
684 if let Some(pos) = self.keys.get(&key).copied() {
685 let mut old = std::mem::replace(&mut self.data[pos], (key, item));
686 // NOTE: the swap is required in order to keep the guarantee
687 // that the key is not replaced by a second push.
688 // I would prefer replacing the key, but that is not supported by
689 // [HashMap]
690 std::mem::swap(&mut old.0, &mut self.data[pos].0);
691 self.update(&old.0);
692 Some(old.1)
693 } else {
694 let old_len = self.len();
695 self.data.push((key.clone(), item));
696 self.keys.insert(key, old_len);
697 // SAFETY: Since we pushed a new item it means that
698 // old_len = self.len() - 1 < self.len()
699 unsafe { self.sift_up(0, old_len) };
700 None
701 }
702 }
703}
704
705impl<K: Hash + Eq, T, C: Compare<T>> BinaryHeap<K, T, C> {
706 /// Returns a mutable reference to the first item in the binary heap, or
707 /// `None` if it is empty.
708 ///
709 /// Note: If the `PeekMut` value is leaked, the heap may be in an
710 /// inconsistent state.
711 ///
712 /// # Examples
713 ///
714 /// Basic usage:
715 ///
716 /// ```
717 /// use mut_binary_heap::BinaryHeap;
718 /// let mut heap: BinaryHeap<i32, i32> = BinaryHeap::new();
719 /// assert!(heap.peek_mut().is_none());
720 ///
721 /// heap.push(0, 1);
722 /// heap.push(1, 5);
723 /// heap.push(2, 2);
724 /// {
725 /// let mut val = heap.peek_mut().unwrap();
726 /// assert_eq!(*val, 5);
727 /// *val = 0;
728 /// }
729 /// assert_eq!(heap.peek(), Some(&2));
730 /// ```
731 ///
732 /// # Time complexity
733 ///
734 /// If the item is modified then the worst case time complexity is *O*(log(*n*)),
735 /// otherwise it's *O*(1).
736 // #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
737 pub fn peek_mut(&mut self) -> Option<PeekMut<'_, K, T, C>> {
738 if self.is_empty() {
739 None
740 } else {
741 Some(PeekMut {
742 heap: self,
743 sift: false,
744 })
745 }
746 }
747
748 /// Removes the greatest item from the binary heap and returns it, or `None` if it
749 /// is empty.
750 ///
751 /// # Examples
752 ///
753 /// Basic usage:
754 ///
755 /// ```
756 /// use mut_binary_heap::BinaryHeap;
757 /// let mut heap = BinaryHeap::<_, _>::from([1, 3], |v| v.clone());
758 ///
759 /// assert_eq!(heap.pop(), Some(3));
760 /// assert_eq!(heap.pop(), Some(1));
761 /// assert_eq!(heap.pop(), None);
762 /// ```
763 ///
764 /// # Time complexity
765 ///
766 /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
767 pub fn pop(&mut self) -> Option<T> {
768 self.pop_with_key().map(|kv| kv.1)
769 }
770
771 /// Removes the greatest item from the binary heap and returns it as a key-value pair,
772 /// or `None` if it is empty.
773 ///
774 /// # Examples
775 ///
776 /// Basic usage:
777 ///
778 /// ```
779 /// use mut_binary_heap::BinaryHeap;
780 /// let mut heap = BinaryHeap::<_,_>::from(vec![1, 3], |v| v.clone());
781 ///
782 /// assert_eq!(heap.pop_with_key(), Some((3, 3)));
783 /// assert_eq!(heap.pop_with_key(), Some((1, 1)));
784 /// assert_eq!(heap.pop_with_key(), None);
785 /// ```
786 ///
787 /// # Time complexity
788 ///
789 /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
790 pub fn pop_with_key(&mut self) -> Option<(K, T)> {
791 let item = self.data.pop().map(|mut item| {
792 // NOTE: we can't just use self.is_empty here, because that will
793 // trigger a debug_assert that keys and data have equal length.
794 if !self.data.is_empty() {
795 swap(&mut item, &mut self.data[0]);
796 // SAFETY: !self.is_empty() means that self.len() > 0
797 unsafe { self.sift_down_to_bottom(0) };
798 }
799 item
800 });
801 item.as_ref().and_then(|kv| self.keys.remove(&kv.0));
802 item
803 }
804
805 /// Returns `true` if the heap contains a value for the given key.
806 ///
807 /// # Examples
808 /// ```
809 /// use mut_binary_heap::BinaryHeap;
810 /// let mut heap = BinaryHeap::<_,_>::from([1, 3], |v| v.clone());
811 ///
812 /// assert!(heap.contains_key(&1));
813 /// assert!(heap.contains_key(&3));
814 /// assert!(!heap.contains_key(&2));
815 /// ```
816 ///
817 /// # Time complexity
818 ///
819 /// This method runs in *O*(1) time.
820 ///
821 pub fn contains_key(&self, key: &K) -> bool {
822 self.keys.contains_key(key)
823 }
824
825 /// Returns a reference to the value for a given key or [None] if the key does not exist.
826 ///
827 /// # Examples
828 /// ```
829 /// use mut_binary_heap::BinaryHeap;
830 /// let mut heap = BinaryHeap::<_,_>::from(vec![1, 3], |v| v.clone());
831 ///
832 /// assert_eq!(heap.get(&1), Some(&1));
833 /// assert_eq!(heap.get(&2), None);
834 /// ```
835 ///
836 /// # Time complexity
837 ///
838 /// This method runs in *O*(1) time.
839 pub fn get(&self, key: &K) -> Option<&T> {
840 self.keys.get(key).map(|index| &self.data[*index].1)
841 }
842
843 /// Returns a mutable reference to the value for a given key or
844 /// [None] if the key does not exist.
845 ///
846 /// The heap is updated when [RefMut] is dropped.
847 /// # Examples
848 /// ```
849 /// use mut_binary_heap::BinaryHeap;
850 /// let mut heap = BinaryHeap::<i32, i32>::from(vec![1, 3], |v| v.clone());
851 ///
852 /// {
853 /// let mut v = heap.get_mut(&1).unwrap();
854 /// assert_eq!(*v, 1);
855 /// *v = 5;
856 /// // Drop updates the heap
857 /// }
858 /// assert_eq!(heap.peek_with_key(), Some((&1, &5)));
859 /// assert_eq!(heap.get(&2), None);
860 /// ```
861 ///
862 /// # Time complexity
863 /// Getting access is *O*(1); the heap update when the returned [RefMut] is dropped costs *O*(log(*n*)).
864 pub fn get_mut<'a>(&'a mut self, key: &'a K) -> Option<RefMut<'a, K, T, C>> {
865 self.keys.get(key).copied().map(|pos| RefMut {
866 heap: self,
867 pos,
868 key,
869 })
870 }
871
872 /// Removes a key from the heap, returning the `(key, value)` if the key
873 /// was previously in the heap.
874 ///
875 /// The key may be any borrowed form of the map's key type, but
876 /// [Hash] and [Eq] on the borrowed form *must* match those for
877 /// the key type.
878 ///
879 /// # Example
880 /// ```
881 /// use mut_binary_heap::BinaryHeap;
882 ///
883 /// let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
884 /// heap.push(0, 5);
885 /// heap.push(1, 3);
886 /// heap.push(2, 6);
887 ///
888 /// assert_eq!(heap.remove(&0), Some((0, 5)));
889 /// assert_eq!(heap.remove(&3), None);
890 /// assert_eq!(heap.len(), 2);
891 /// assert_eq!(heap.pop(), Some(6));
892 /// assert_eq!(heap.pop(), Some(3));
893 ///
894 /// ```
895 pub fn remove(&mut self, key: &K) -> Option<(K, T)> {
896 if let Some(pos) = self.keys.get(key).copied() {
897 let item = self.data.pop().map(|mut item| {
898 if !self.data.is_empty() && pos < self.data.len() {
899 swap(&mut item, &mut self.data[pos]);
900 // SAFETY: !self.is_empty() && pos < self.data.len().
901 // Mirror `update`: the element moved into `pos` may need to go up or down.
unsafe {
if self.sift_up(0, pos) == pos {
self.sift_down(pos);
}
}
}
902 item
903 item
904 });
905 item.as_ref().and_then(|kv| self.keys.remove(&kv.0));
906 item
907 } else {
908 None
909 }
910 }
911
912 /// Updates the binary heap after the value behind this key was modified.
913 ///
914 /// This is called by [push] if the key already existed and also by [RefMut].
915 ///
916 /// This function will panic if the key is not part of the binary heap.
917 /// A non-panicking alternative is to check with [BinaryHeap::contains_key] first,
918 /// or to use [BinaryHeap::get_mut] instead.
919 ///
920 /// # Time complexity
921 ///
922 /// This function runs in *O*(log(*n*)) time.
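    ///
    /// A sketch of the interior-mutability scenario mentioned in the type-level docs,
    /// where a value changes behind the heap's back and the heap must be told about it:
    ///
    /// ```
    /// use std::cell::RefCell;
    /// use mut_binary_heap::BinaryHeap;
    ///
    /// let mut heap: BinaryHeap<i32, RefCell<i32>> = BinaryHeap::new();
    /// heap.push(0, RefCell::new(1));
    /// heap.push(1, RefCell::new(5));
    ///
    /// // Modify the value for key 0 in place, then restore the heap order.
    /// *heap.get(&0).unwrap().borrow_mut() = 10;
    /// heap.update(&0);
    /// assert_eq!(heap.peek_with_key().map(|(k, _)| *k), Some(0));
    /// ```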
923 #[doc(hidden)]
924 pub fn update(&mut self, key: &K) {
925 let pos = self.keys[key];
926 let pos_after_sift_up = unsafe { self.sift_up(0, pos) };
927 if pos_after_sift_up != pos {
928 return;
929 }
930 unsafe {
931 self.sift_down(pos);
932 }
933 }
934
935 // /// Consumes the `BinaryHeap` and returns a vector in sorted
936 // /// (ascending) order.
937 // ///
938 // /// # Examples
939 // ///
940 // /// Basic usage:
941 // ///
942 // /// ```
943 // /// use mut_binary_heap::BinaryHeap;
944 // ///
945 // /// let mut heap = BinaryHeap::<_, _>::from([1, 2, 4, 5, 7], |v| v.clone());
946 // /// heap.push(0, 6);
947 // /// heap.push(1, 3);
948 // ///
949 // /// // let vec = heap.into_sorted_vec();
950 // /// // assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
951 // /// ```
952 // #[must_use = "`self` will be dropped if the result is not used"]
953 // // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
954 // pub fn into_sorted_vec(mut self) -> Vec<T> {
955 // let mut end = self.len();
956 // while end > 1 {
957 // end -= 1;
958 // // SAFETY: `end` goes from `self.len() - 1` to 1 (both included),
959 // // so it's always a valid index to access.
960 // // It is safe to access index 0 (i.e. `ptr`), because
961 // // 1 <= end < self.len(), which means self.len() >= 2.
962 // unsafe {
963 // let ptr = self.data.as_mut_ptr();
964 // ptr::swap(ptr, ptr.add(end));
965 // }
966 // // SAFETY: `end` goes from `self.len() - 1` to 1 (both included) so:
967 // // 0 < 1 <= end <= self.len() - 1 < self.len()
968 // // Which means 0 < end and end < self.len().
969 // unsafe { self.sift_down_range(0, end) };
970 // }
971 // self.into_vec()
972 // }
973
974 // The implementations of sift_up and sift_down use unsafe blocks in
975 // order to move an element out of the vector (leaving behind a
976 // hole), shift along the others and move the removed element back into the
977 // vector at the final location of the hole.
978 // The `Hole` type is used to represent this, and make sure
979 // the hole is filled back at the end of its scope, even on panic.
980 // Using a hole reduces the constant factor compared to using swaps,
981 // which involves twice as many moves.
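    //
    // As a small worked sketch of sift_up on values [9, 7, 5, 8] (keys omitted):
    // a hole is opened at index 3, taking out 8; 7 is moved from index 1 into the
    // hole (one move instead of a swap); when the hole is dropped, 8 is written
    // into index 1, giving [9, 8, 5, 7].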
982
983 /// Reserves capacity for at least `additional` more elements to be inserted in the
984 /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
985 ///
986 /// # Panics
987 ///
988 /// Panics if the new capacity overflows `usize`.
989 ///
990 /// # Examples
991 ///
992 /// Basic usage:
993 ///
994 /// ```
995 /// use mut_binary_heap::BinaryHeap;
996 /// let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
997 /// heap.reserve(100);
998 /// assert!(heap.capacity_min() >= 100);
999 /// heap.push(0, 4);
1000 /// ```
1001 // #[stable(feature = "rust1", since = "1.0.0")]
1002 pub fn reserve(&mut self, additional: usize) {
1003 self.data.reserve(additional);
1004 self.keys.reserve(additional);
1005 }
1006
1007 /// Discards as much additional capacity as possible.
1008 /// The implementations of [Vec] and [HashMap] determine the exact value of the
1009 /// new capacity.
1010 ///
1011 /// # Examples
1012 ///
1013 /// Basic usage:
1014 ///
1015 /// ```
1016 /// use mut_binary_heap::BinaryHeap;
1017 /// let mut heap: BinaryHeap<i32, i32> = BinaryHeap::with_capacity(100);
1018 ///
1019 /// assert!(heap.capacity_min() >= 100);
1020 /// heap.shrink_to_fit();
1021 /// assert!(heap.capacity_min() >= 0);
1022 /// ```
1023 // #[stable(feature = "rust1", since = "1.0.0")]
1024 pub fn shrink_to_fit(&mut self) {
1025 self.data.shrink_to_fit();
1026 self.keys.shrink_to_fit();
1027 }
1028
1029 /// Discards capacity with a lower bound.
1030 /// The implementations of [Vec] and [HashMap] determine the exact value of the
1031 /// new capacity.
1032 ///
1033 /// The capacity will remain at least as large as both the length
1034 /// and the supplied value.
1035 ///
1036 /// If the current capacity is less than the lower limit, this is a no-op.
1037 ///
1038 /// # Examples
1039 ///
1040 /// ```
1041 /// use mut_binary_heap::BinaryHeap;
1042 /// let mut heap: BinaryHeap<i32, i32> = BinaryHeap::with_capacity(100);
1043 ///
1044 /// assert!(heap.capacity_min() >= 100);
1045 /// heap.shrink_to(10);
1046 /// assert!(heap.capacity_min() >= 10);
1047 /// ```
1048 #[inline]
1049 pub fn shrink_to(&mut self, min_capacity: usize) {
1050 self.data.shrink_to(min_capacity);
1051 self.keys.shrink_to(min_capacity);
1052 }
1053
1054 /// # Safety
1055 ///
1056 /// The caller must guarantee that `pos < self.data.len()`.
1057 unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
1058 // Take out the value at `pos` and create a hole.
1059 // SAFETY: The caller guarantees that pos < self.data.len()
1060 let mut hole = unsafe { Hole::new(&mut self.data, &mut self.keys, pos) };
1061
1062 while hole.pos() > start {
1063 let parent = (hole.pos() - 1) / 2;
1064
1065 // SAFETY: hole.pos() > start >= 0, which means hole.pos() > 0
1066 // and so hole.pos() - 1 can't underflow.
1067 // This guarantees that parent < hole.pos() so
1068 // it's a valid index and also != hole.pos().
1069 if self
1070 .cmp
1071 .compares_le(hole.element(), unsafe { hole.get(parent) })
1072 {
1073 break;
1074 }
1075
1076 // SAFETY: Same as above
1077 unsafe { hole.move_to(parent) };
1078 }
1079
1080 hole.pos()
1081 }
1082
1083 /// Take an element at `pos` and move it down the heap,
1084 /// while its children are larger.
1085 ///
1086 /// # Safety
1087 ///
1088 /// The caller must guarantee that `pos < end <= self.data.len()`.
1089 unsafe fn sift_down_range(&mut self, pos: usize, end: usize) {
1090 // SAFETY: The caller guarantees that pos < end <= self.data.len().
1091 let mut hole = unsafe { Hole::new(&mut self.data, &mut self.keys, pos) };
1092 let mut child = 2 * hole.pos() + 1;
1093
1094 // Loop invariant: child == 2 * hole.pos() + 1.
1095 while child <= end.saturating_sub(2) {
1096 // compare with the greater of the two children
1097 // SAFETY: child < end - 1 < self.data.len() and
1098 // child + 1 < end <= self.data.len(), so they're valid indexes.
1099 // child == 2 * hole.pos() + 1 != hole.pos() and
1100 // child + 1 == 2 * hole.pos() + 2 != hole.pos().
1101 // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
1102 // if T is a ZST
1103 child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize;
1104
1105 // if we are already in order, stop.
1106 // SAFETY: child is now either the old child or the old child+1
1107 // We already proven that both are < self.data.len() and != hole.pos()
1108 if self
1109 .cmp
1110 .compares_ge(hole.element(), unsafe { hole.get(child) })
1111 {
1112 return;
1113 }
1114
1115 // SAFETY: same as above.
1116 unsafe { hole.move_to(child) };
1117 child = 2 * hole.pos() + 1;
1118 }
1119
1120 // SAFETY: && short circuit, which means that in the
1121 // second condition it's already true that child == end - 1 < self.data.len().
1122 if child == end - 1
1123 && self
1124 .cmp
1125 .compares_lt(hole.element(), unsafe { hole.get(child) })
1126 {
1127 // SAFETY: child is already proven to be a valid index and
1128 // child == 2 * hole.pos() + 1 != hole.pos().
1129 unsafe { hole.move_to(child) };
1130 }
1131 }
1132
1133 /// # Safety
1134 ///
1135 /// The caller must guarantee that `pos < self.data.len()`.
1136 unsafe fn sift_down(&mut self, pos: usize) {
1137 let len = self.data.len();
1138 // SAFETY: pos < len is guaranteed by the caller and
1139 // obviously len = self.data.len() <= self.len().
1140 unsafe { self.sift_down_range(pos, len) };
1141 }
1142
1143 /// Take an element at `pos` and move it all the way down the heap,
1144 /// then sift it up to its position.
1145 ///
1146 /// Note: This is faster when the element is known to be large / should
1147 /// be closer to the bottom.
1148 ///
1149 /// # Safety
1150 ///
1151 /// The caller must guarantee that `pos < self.data.len()`.
1152 unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
1153 let end = self.data.len();
1154 let start = pos;
1155
1156 // SAFETY: The caller guarantees that pos < self.data.len().
1157 let mut hole = unsafe { Hole::new(&mut self.data, &mut self.keys, pos) };
1158 let mut child = 2 * hole.pos() + 1;
1159
1160 // Loop invariant: child == 2 * hole.pos() + 1.
1161 while child <= end.saturating_sub(2) {
1162 // SAFETY: child < end - 1 < self.data.len() and
1163 // child + 1 < end <= self.data.len(), so they're valid indexes.
1164 // child == 2 * hole.pos() + 1 != hole.pos() and
1165 // child + 1 == 2 * hole.pos() + 2 != hole.pos().
1166 // FIXME: 2 * hole.pos() + 1 or 2 * hole.pos() + 2 could overflow
1167 // if T is a ZST
1168 child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize;
1169
1170 // SAFETY: Same as above
1171 unsafe { hole.move_to(child) };
1172 child = 2 * hole.pos() + 1;
1173 }
1174
1175 if child == end - 1 {
1176 // SAFETY: child == end - 1 < self.data.len(), so it's a valid index
1177 // and child == 2 * hole.pos() + 1 != hole.pos().
1178 unsafe { hole.move_to(child) };
1179 }
1180 pos = hole.pos();
1181 drop(hole);
1182
1183 // SAFETY: pos is the position in the hole and was already proven
1184 // to be a valid index.
1185 unsafe { self.sift_up(start, pos) };
1186 }
1187
1188 /// Rebuild assuming data[0..start] is still a proper heap.
1189 #[allow(dead_code)] // TODO this is unused because append is currently not implemented
1190 fn rebuild_tail(&mut self, start: usize) {
1191 if start == self.len() {
1192 return;
1193 }
1194
1195 let tail_len = self.len() - start;
1196
1197 #[inline(always)]
1198 fn log2_fast(x: usize) -> usize {
1199 (usize::BITS - x.leading_zeros() - 1) as usize
1200 }
1201
1202 // `rebuild` takes O(self.data.len()) operations
1203 // and about 2 * self.data.len() comparisons in the worst case
1204 // while repeating `sift_up` takes O(tail_len * log(start)) operations
1205 // and about 1 * tail_len * log_2(start) comparisons in the worst case,
1206 // assuming start >= tail_len. For larger heaps, the crossover point
1207 // no longer follows this reasoning and was determined empirically.
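        // A rough worked example of the small-heap branch: with start = 1024 and
        // tail_len = 256 (so self.data.len() = 1280), rebuilding costs about
        // 2 * 1280 = 2560 comparisons while sifting up the tail costs about
        // 256 * log2(1024) = 2560, i.e. this is right at the crossover point.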
1208 let better_to_rebuild = if start < tail_len {
1209 true
1210 } else if self.data.len() <= 2048 {
1211 2 * self.data.len() < tail_len * log2_fast(start)
1212 } else {
1213 2 * self.data.len() < tail_len * 11
1214 };
1215
1216 if better_to_rebuild {
1217 self.rebuild();
1218 } else {
1219 for i in start..self.data.len() {
1220 // SAFETY: The index `i` is always less than self.data.len().
1221 unsafe { self.sift_up(0, i) };
1222 }
1223 }
1224 }
1225
1226 /// Rebuilds the entire heap.
1227 ///
1228 /// In some cases it might be faster to rebuild
1229 /// the entire heap instead of just updating the specific elements that have
1230 /// been modified.
1231 fn rebuild(&mut self) {
1232 let mut n = self.len() / 2;
1233 while n > 0 {
1234 n -= 1;
1235 // SAFETY: n starts from self.data.len() / 2 and goes down to 0.
1236 // The only case when !(n < self.data.len()) is if
1237 // self.data.len() == 0, but it's ruled out by the loop condition.
1238 unsafe { self.sift_down(n) };
1239 }
1240 }
1241
1242 // /// Moves all the elements of `other` into `self`, leaving `other` empty.
1243 // ///
1244 // /// # Examples
1245 // ///
1246 // /// Basic usage:
1247 // ///
1248 // /// ```
1249 // /// use mut_binary_heap::BinaryHeap;
1250 // ///
1251 // /// let mut a = BinaryHeap::from([-10, 1, 2, 3, 3]);
1252 // /// let mut b = BinaryHeap::from([-20, 5, 43]);
1253 // ///
1254 // /// a.append(&mut b);
1255 // ///
1256 // /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
1257 // /// assert!(b.is_empty());
1258 // /// ```
1259 // ///
1260 // pub fn append(&mut self, other: &mut Self) {
1261 // if self.len() < other.len() {
1262 // swap(self, other);
1263 // }
1264 //
1265 // let start = self.data.len();
1266 //
1267 // // TODO append needs to also copy keys. How do we handle duplicate keys?
1268 // self.data.append(&mut other.data);
1269 //
1270 // self.rebuild_tail(start);
1271 // }
1272}
1273
1274impl<K, T, C> BinaryHeap<K, T, C> {
1275 /// Returns an iterator visiting all key-value pairs in the underlying vector, in
1276 /// arbitrary order.
1277 ///
1278 /// # Examples
1279 ///
1280 /// Basic usage:
1281 ///
1282 /// ```
1283 /// use mut_binary_heap::BinaryHeap;
1284 /// let heap = BinaryHeap::<_,_>::from([1, 2, 3, 4], |v| v.clone());
1285 ///
1286 /// // Print (1, 1), (2, 2), (3, 3), (4, 4) in arbitrary order
1287 /// for x in heap.iter() {
1288 /// println!("key {}, value {}", x.0, x.1);
1289 /// }
1290 /// ```
1291 pub fn iter(&self) -> Iter<'_, K, T> {
1292 Iter {
1293 iter: self.data.iter(),
1294 }
1295 }
1296
1297 /// Returns an iterator visiting all values in the underlying vector, in
1298 /// arbitrary order.
1299 ///
1300 /// # Examples
1301 ///
1302 /// Basic usage:
1303 ///
1304 /// ```
1305 /// use mut_binary_heap::BinaryHeap;
1306 /// let heap = BinaryHeap::<_,_>::from([1, 2, 3, 4], |v| v.clone());
1307 ///
1308 /// // Print 1, 2, 3, 4 in arbitrary order
1309 /// for x in heap.iter_values() {
1310 /// println!("{}", x);
1311 /// }
1312 /// ```
1313 pub fn iter_values(&self) -> IterValues<'_, K, T> {
1314 IterValues {
1315 iter: self.data.iter(),
1316 }
1317 }
1318
1319 /// Returns an iterator visiting all keys in the underlying vector, in
1320 /// arbitrary order.
1321 ///
1322 /// # Examples
1323 ///
1324 /// Basic usage:
1325 ///
1326 /// ```
1327 /// use mut_binary_heap::BinaryHeap;
1328 /// let heap = BinaryHeap::<_,_>::from([1, 2, 3, 4], |v| v.clone());
1329 ///
1330 /// // Print 1, 2, 3, 4 in arbitrary order
1331 /// for x in heap.iter_keys() {
1332 /// println!("{}", x);
1333 /// }
1334 /// ```
1335 pub fn iter_keys(&self) -> IterKeys<'_, K, T> {
1336 IterKeys {
1337 iter: self.data.iter(),
1338 }
1339 }
1340
1341 /// Creates a consuming iterator, that is, one that moves each value out of
1342 /// the heap in arbitrary order. The heap cannot be used after calling this.
1343 ///
1344 /// See also [BinaryHeap::into_iter()], [BinaryHeap::into_keys()]
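    ///
    /// A short sketch (this assumes `IntoValues` implements `Iterator<Item = T>`, as
    /// its name and the neighbouring iterators suggest):
    ///
    /// ```
    /// use mut_binary_heap::BinaryHeap;
    /// let heap = BinaryHeap::<_, _>::from([1, 2, 3], |v| v.clone());
    ///
    /// let mut values: Vec<i32> = heap.into_values().collect();
    /// values.sort();
    /// assert_eq!(values, [1, 2, 3]);
    /// ```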
1345 pub fn into_values(self) -> IntoValues<K, T> {
1346 IntoValues {
1347 iter: self.data.into_iter(),
1348 }
1349 }
1350
1351 /// Creates a consuming iterator, that is, one that moves each key out of
1352 /// the heap in arbitrary order. The heap cannot be used after calling this.
1353 ///
1354 /// See also [BinaryHeap::into_iter()], [BinaryHeap::into_values()]
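    ///
    /// A short sketch (this assumes `IntoKeys` implements `Iterator<Item = K>`, as
    /// its name and the neighbouring iterators suggest):
    ///
    /// ```
    /// use mut_binary_heap::BinaryHeap;
    /// let heap = BinaryHeap::<_, _>::from([1, 2, 3], |v| v.clone());
    ///
    /// let mut keys: Vec<i32> = heap.into_keys().collect();
    /// keys.sort();
    /// assert_eq!(keys, [1, 2, 3]);
    /// ```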
1355 pub fn into_keys(self) -> IntoKeys<K, T> {
1356 IntoKeys {
1357 iter: self.data.into_iter(),
1358 }
1359 }
1360
1361 /// Returns an iterator which retrieves elements in heap order.
1362 /// This method consumes the original heap.
1363 ///
1364 /// # Examples
1365 ///
1366 /// Basic usage:
1367 ///
1368 /// ```
1369 /// use mut_binary_heap::BinaryHeap;
1370 /// let heap = BinaryHeap::<_,_>::from([1, 2, 3, 4, 5], |v| v.clone());
1371 ///
1372 /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), [5, 4]);
1373 /// ```
1374 // #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1375 pub fn into_iter_sorted(self) -> IntoIterSorted<K, T, C> {
1376 IntoIterSorted { inner: self }
1377 }
1378
1379 /// Returns the greatest item in the binary heap, or `None` if it is empty.
1380 ///
1381 /// # Examples
1382 ///
1383 /// Basic usage:
1384 ///
1385 /// ```
1386 /// use mut_binary_heap::BinaryHeap;
1387 /// let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
1388 /// assert_eq!(heap.peek(), None);
1389 ///
1390 /// heap.push(1, 1);
1391 /// heap.push(2, 5);
1392 /// heap.push(3, 2);
1393 /// assert_eq!(heap.peek(), Some(&5));
1394 ///
1395 /// ```
1396 ///
1397 /// # Time complexity
1398 ///
1399 /// Cost is *O*(1) in the worst case.
1400 #[must_use]
1401 // #[stable(feature = "rust1", since = "1.0.0")]
1402 pub fn peek(&self) -> Option<&T> {
1403 self.peek_with_key().map(|kv| kv.1)
1404 }
1405
1406 /// Returns the greatest item in the binary heap as a key-value pair,
1407 /// or `None` if it is empty.
1408 ///
1409 /// # Examples
1410 ///
1411 /// Basic usage:
1412 ///
1413 /// ```
1414 /// use mut_binary_heap::BinaryHeap;
1415 /// let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
1416 /// assert_eq!(heap.peek(), None);
1417 ///
1418 /// heap.push(1, 1);
1419 /// heap.push(2, 5);
1420 /// heap.push(3, 2);
1421 /// assert_eq!(heap.peek_with_key(), Some((&2, &5)));
1423 ///
1424 /// ```
1425 ///
1426 /// # Time complexity
1427 ///
1428 /// Cost is *O*(1) in the worst case.
1429 #[must_use]
1430 pub fn peek_with_key(&self) -> Option<(&K, &T)> {
1431 let kv = self.data.get(0);
1432 kv.map(|kv| (&kv.0, &kv.1))
1433 }
1434
1435 /// Returns the number of elements the binary heap can hold without reallocating.
1436 /// Returns a tuple with the capacities of the internal vector and hash map.
1437 ///
1438 /// # Examples
1439 ///
1440 /// Basic usage:
1441 ///
1442 /// ```
1443 /// use mut_binary_heap::BinaryHeap;
1444 /// let mut heap: BinaryHeap<_, _> = BinaryHeap::with_capacity(100);
1445 /// assert!(heap.capacity().0 >= 100);
1446 /// assert!(heap.capacity().1 >= 100);
1447 /// heap.push(0, 4);
1448 /// ```
1449 #[must_use]
1450 pub fn capacity(&self) -> (usize, usize) {
1451 (self.data.capacity(), self.keys.capacity())
1452 }
1453
1454 /// Returns the minimum number of elements the binary heap can hold without reallocating.
1455 ///
1456 /// # Examples
1457 ///
1458 /// Basic usage:
1459 ///
1460 /// ```
1461 /// use mut_binary_heap::BinaryHeap;
1462 /// let mut heap: BinaryHeap<_, _> = BinaryHeap::with_capacity(100);
1463 /// assert!(heap.capacity_min() >= 100);
1464 /// heap.push(0, 4);
1465 /// ```
1466 #[must_use]
1467 pub fn capacity_min(&self) -> usize {
1468 min(self.data.capacity(), self.keys.capacity())
1469 }
1470
1471 // /// Consumes the `BinaryHeap` and returns the underlying vector
1472 // /// in arbitrary order.
1473 // ///
1474 // /// # Examples
1475 // ///
1476 // /// Basic usage:
1477 // ///
1478 // /// ```
1479 // /// use mut_binary_heap::BinaryHeap;
1480 // /// let heap = BinaryHeap::<_,_>::from([1, 2, 3, 4, 5, 6, 7], |v| v.clone());
1481 // /// // let vec = heap.into_vec();
1482 // ///
1483 // /// // Will print in some order
1484 // /// // for x in vec {
1485 // /// // println!("{}", x);
1486 // /// // }
1487 // /// ```
1488 // TODO into_vec impl and type def
1489 // #[must_use = "`self` will be dropped if the result is not used"]
1490 // // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1491 // pub fn into_vec(self) -> Vec<T> {
1492 // self.into()
1493 // }
1494
1495 /// Returns the length of the binary heap.
1496 ///
1497 /// # Examples
1498 ///
1499 /// Basic usage:
1500 ///
1501 /// ```
1502 /// use mut_binary_heap::BinaryHeap;
1503 /// let heap = BinaryHeap::<_,_>::from([1, 3].iter(), |v| v.clone());
1504 ///
1505 /// assert_eq!(heap.len(), 2);
1506 /// ```
1507 #[must_use]
1508 // #[stable(feature = "rust1", since = "1.0.0")]
1509 pub fn len(&self) -> usize {
1510 debug_assert!(self.data.len() == self.keys.len());
1511 self.data.len()
1512 }
1513
1514 /// Checks if the binary heap is empty.
1515 ///
1516 /// # Examples
1517 ///
1518 /// Basic usage:
1519 ///
1520 /// ```
1521 /// use mut_binary_heap::BinaryHeap;
1522 /// let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
1523 ///
1524 /// assert!(heap.is_empty());
1525 ///
1526 /// heap.push(0, 3);
1527 /// heap.push(1, 5);
1528 /// heap.push(2, 1);
1529 ///
1530 /// assert!(!heap.is_empty());
1531 /// ```
1532 #[must_use]
1533 // #[stable(feature = "rust1", since = "1.0.0")]
1534 pub fn is_empty(&self) -> bool {
1535 self.len() == 0
1536 }
1537
1538 /// Clears the binary heap, returning an iterator over the removed elements
1539 /// in arbitrary order. If the iterator is dropped before being fully
1540 /// consumed, it drops the remaining elements in arbitrary order.
1541 ///
1542 /// The returned iterator keeps a mutable borrow on the heap to optimize
1543 /// its implementation.
1544 ///
1545 /// # Examples
1546 ///
1547 /// Basic usage:
1548 ///
1549 /// ```
1550 /// use mut_binary_heap::BinaryHeap;
1551 /// let mut heap = BinaryHeap::<_,_>::from([1, 3].iter(), |v| v.clone());
1552 ///
1553 /// assert!(!heap.is_empty());
1554 ///
1555 /// for x in heap.drain() {
1556 /// println!("key {}, value {}", x.0, x.1);
1557 /// }
1558 ///
1559 /// assert!(heap.is_empty());
1560 /// ```
1561 #[inline]
1562 pub fn drain(&mut self) -> Drain<'_, (K, T)> {
1563 self.keys.clear();
1564 Drain {
1565 iter: self.data.drain(..),
1566 }
1567 }
1568
1569 /// Drops all items from the binary heap.
1570 ///
1571 /// # Examples
1572 ///
1573 /// Basic usage:
1574 ///
1575 /// ```
1576 /// use mut_binary_heap::BinaryHeap;
1577 /// let mut heap = BinaryHeap::<_,_>::from([1, 3].iter(), |v| v.clone());
1578 ///
1579 /// assert!(!heap.is_empty());
1580 ///
1581 /// heap.clear();
1582 ///
1583 /// assert!(heap.is_empty());
1584 /// ```
1585 pub fn clear(&mut self) {
1586 self.drain();
1587 }
1588}
1589
1590#[cfg(feature = "serde")]
1591impl<K: Hash + Eq + Serialize, T: Serialize, C: Serialize> Serialize for BinaryHeap<K, T, C> {
1592 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
1593 where
1594 S: Serializer,
1595 {
1596 let mut state = serializer.serialize_struct("BinaryHeap", 3)?;
1597 state.serialize_field("data", &self.data)?;
1598 state.serialize_field("cmp", &self.cmp)?;
1599 state.serialize_field("keys", &self.keys)?;
1600 state.end()
1601 }
1602}
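
// A round-trip sketch (assuming the `serde` feature is enabled and a format crate
// such as `serde_json` is available as a dev-dependency):
//
//     let mut heap: BinaryHeap<i32, i32> = BinaryHeap::new();
//     heap.push(0, 3);
//     heap.push(1, 5);
//     let json = serde_json::to_string(&heap).unwrap();
//     let back: BinaryHeap<i32, i32> = serde_json::from_str(&json).unwrap();
//     assert_eq!(back.peek(), Some(&5));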
1603
1604#[cfg(feature = "serde")]
1605impl<'de, K: Hash + Eq + Deserialize<'de>, T: Deserialize<'de>, C: Deserialize<'de>>
1606 Deserialize<'de> for BinaryHeap<K, T, C>
1607{
1608 fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
1609 where
1610 D: Deserializer<'de>,
1611 {
1612 enum Field {
1613 Data,
1614 Cmp,
1615 Keys,
1616 }
1617
1618 impl<'de> Deserialize<'de> for Field {
1619 fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
1620 where
1621 D: Deserializer<'de>,
1622 {
1623 struct FieldVisitor;
1624
1625 impl<'de_f> Visitor<'de_f> for FieldVisitor {
1626 type Value = Field;
1627
1628 fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
1629 formatter.write_str("`data` or `cmp` or `keys`")
1630 }
1631
1632 fn visit_str<E>(self, value: &str) -> Result<Field, E>
1633 where
1634 E: de::Error,
1635 {
1636 match value {
1637 "data" => Ok(Field::Data),
1638 "cmp" => Ok(Field::Cmp),
1639 "keys" => Ok(Field::Keys),
1640 _ => Err(de::Error::unknown_field(value, FIELDS)),
1641 }
1642 }
1643 }
1644 deserializer.deserialize_identifier(FieldVisitor)
1645 }
1646 }
1647
1648 struct BinaryHeapVisitor<
1649 'de_bh,
1650 K: Hash + Eq + Deserialize<'de_bh>,
1651 T: Deserialize<'de_bh>,
1652 C: Deserialize<'de_bh>,
1653 > {
1654 _phantom_de: std::marker::PhantomData<&'de_bh ()>,
1655 _phantom_k: std::marker::PhantomData<K>,
1656 _phantom_t: std::marker::PhantomData<T>,
1657 _phantom_c: std::marker::PhantomData<C>,
1658 }
1659
1660 impl<
1661 'de_bh,
1662 K: Hash + Eq + Deserialize<'de_bh>,
1663 T: Deserialize<'de_bh>,
1664 C: Deserialize<'de_bh>,
1665 > Visitor<'de_bh> for BinaryHeapVisitor<'de_bh, K, T, C>
1666 {
1667 type Value = BinaryHeap<K, T, C>;
1668
1669 fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
1670 formatter.write_str("struct BinaryHeap")
1671 }
1672
1673 fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
1674 where
1675 V: SeqAccess<'de_bh>,
1676 {
1677 let data = seq
1678 .next_element()?
1679 .ok_or_else(|| de::Error::invalid_length(0, &self))?;
1680 let cmp = seq
1681 .next_element()?
1682 .ok_or_else(|| de::Error::invalid_length(1, &self))?;
1683 let keys = seq
1684 .next_element()?
1685 .ok_or_else(|| de::Error::invalid_length(2, &self))?;
1686
1687 Ok(BinaryHeap {
1688 data,
1689 cmp,
1690 keys,
1691 _not_sync: PhantomData::default(),
1692 })
1693 }
1694
1695 fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
1696 where
1697 V: MapAccess<'de_bh>,
1698 {
1699 let mut data = None;
1700 let mut cmp = None;
1701 let mut keys = None;
1702 while let Some(key) = map.next_key()? {
1703 match key {
1704 Field::Data => {
1705 if data.is_some() {
1706 return Err(de::Error::duplicate_field("data"));
1707 }
1708 data = Some(map.next_value()?);
1709 }
1710 Field::Cmp => {
1711 if cmp.is_some() {
1712 return Err(de::Error::duplicate_field("cmp"));
1713 }
1714 cmp = Some(map.next_value()?);
1715 }
1716 Field::Keys => {
1717 if keys.is_some() {
1718 return Err(de::Error::duplicate_field("keys"));
1719 }
1720 keys = Some(map.next_value()?);
1721 }
1722 }
1723 }
1724
1725 let data = data.ok_or_else(|| de::Error::missing_field("data"))?;
1726 let cmp = cmp.ok_or_else(|| de::Error::missing_field("cmp"))?;
1727 let keys = keys.ok_or_else(|| de::Error::missing_field("keys"))?;
1728
1729 Ok(BinaryHeap {
1730 data,
1731 cmp,
1732 keys,
1733 _not_sync: PhantomData::default(),
1734 })
1735 }
1736 }
1737
1738 let visitor = BinaryHeapVisitor {
1739 _phantom_de: Default::default(),
1740 _phantom_k: Default::default(),
1741 _phantom_t: Default::default(),
1742 _phantom_c: Default::default(),
1743 };
1744
1745 const FIELDS: &'static [&'static str] = &["data", "cmp", "keys"];
1746 deserializer.deserialize_struct("BinaryHeap", FIELDS, visitor)
1747 }
1748}
1749
1750/// Hole represents a hole in a slice i.e., an index without valid value
1751/// (because it was moved from or duplicated).
1752/// In drop, `Hole` will restore the slice by filling the hole
1753/// position with the value that was originally removed.
1754struct Hole<'a, K: Hash + Eq, T: 'a> {
1755 data: &'a mut [(K, T)],
1756 keys: &'a mut HashMap<K, usize>,
1757 elt: ManuallyDrop<(K, T)>,
1758 pos: usize,
1759}
1760
1761impl<'a, K: Hash + Eq, T> Hole<'a, K, T> {
1762 /// Create a new `Hole` at index `pos`.
1763 ///
1764 /// Unsafe because pos must be within the data slice.
1765 #[inline]
1766 unsafe fn new(data: &'a mut [(K, T)], keys: &'a mut HashMap<K, usize>, pos: usize) -> Self {
1767 debug_assert!(pos < data.len());
1768 // SAFE: pos should be inside the slice
1769 let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
1770 debug_assert!(keys.contains_key(&elt.0));
1771 Hole {
1772 data,
1773 keys,
1774 elt: ManuallyDrop::new(elt),
1775 pos,
1776 }
1777 }
1778
1779 /// Returns the position of the hole.
1780 #[inline]
1781 fn pos(&self) -> usize {
1782 self.pos
1783 }
1784
1785 /// Returns a reference to the element removed.
1786 #[inline]
1787 fn element(&self) -> &T {
1788 &self.elt.1
1789 }
1790
1791 /// Returns a reference to the element at `index`.
1792 ///
1793 /// # Safety
1794 ///
1795 /// Index must be within the data slice and not equal to pos.
1796 #[inline]
1797 unsafe fn get(&self, index: usize) -> &T {
1798 debug_assert!(index != self.pos);
1799 debug_assert!(index < self.data.len());
1800 let key_value = unsafe { self.data.get_unchecked(index) };
1801 &key_value.1
1802 }
1803
1804 /// Move hole to new location
1805 ///
1806 /// # Safety
1807 ///
1808 /// target_position must be within the data slice and not equal to pos.
1809 #[inline]
1810 unsafe fn move_to(&mut self, target_position: usize) {
1811 debug_assert!(target_position != self.pos);
1812 debug_assert!(target_position < self.data.len());
1813 unsafe {
1814 let ptr = self.data.as_mut_ptr();
1815 let target_ptr: *const _ = ptr.add(target_position);
1816
1817 // update target index in key map
1818 let target_element: &(K, T) = &*target_ptr;
1819 *self.keys.get_mut(&target_element.0).expect(
1820 "Hole can only exist for key values pairs, that are already part of the heap.",
1821 ) = self.pos;
1822
1823 // move target into hole
1824 let hole_ptr = ptr.add(self.pos);
1825 ptr::copy_nonoverlapping(target_ptr, hole_ptr, 1);
1826 }
1827 // update hole position
1828 self.pos = target_position;
1829 }
1830}
1831
1832impl<K: Hash + Eq, T> Drop for Hole<'_, K, T> {
1833 #[inline]
1834 fn drop(&mut self) {
1835 // fill the hole again
1836 unsafe {
1837 let pos = self.pos;
1838 ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
1839 }
1840 let key = &self.elt.0;
1841 *self.keys.get_mut(key).expect(
1842 "Hole can only exist for key values pairs, that are already part of the heap.",
1843 ) = self.pos;
1844 }
1845}
1846
1847#[must_use = "iterators are lazy and do nothing unless consumed"]
1848// #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1849#[derive(Clone, Debug)]
1850pub struct IntoIterSorted<K, T, C> {
1851 inner: BinaryHeap<K, T, C>,
1852}
1853
1854// #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
1855impl<K: Hash + Eq, T, C: Compare<T>> Iterator for IntoIterSorted<K, T, C> {
1856 type Item = T; // TODO IntoIterSorted should return (K, T) and we need a variant for only keys or values
1857
1858 #[inline]
1859 fn next(&mut self) -> Option<T> {
1860 self.inner.pop()
1861 }
1862
1863 #[inline]
1864 fn size_hint(&self) -> (usize, Option<usize>) {
1865 let exact = self.inner.len();
1866 (exact, Some(exact))
1867 }
1868}
1869
1870/// A draining iterator over the elements of a `BinaryHeap`.
1871///
1872/// This `struct` is created by [`BinaryHeap::drain()`]. See its
1873/// documentation for more.
1874#[derive(Debug)]
1875pub struct Drain<'a, T: 'a> {
1876 iter: vec::Drain<'a, T>,
1877}
1878
1879impl<T> Iterator for Drain<'_, T> {
1880 type Item = T;
1881
1882 #[inline]
1883 fn next(&mut self) -> Option<T> {
1884 self.iter.next()
1885 }
1886
1887 #[inline]
1888 fn size_hint(&self) -> (usize, Option<usize>) {
1889 self.iter.size_hint()
1890 }
1891}
1892
1893impl<T> DoubleEndedIterator for Drain<'_, T> {
1894 #[inline]
1895 fn next_back(&mut self) -> Option<T> {
1896 self.iter.next_back()
1897 }
1898}
1899
1900// #[stable(feature = "drain", since = "1.6.0")]
1901// impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {
1902// fn is_empty(&self) -> bool {
1903// self.iter.is_empty()
1904// }
1905// }
1906
1907// #[stable(feature = "fused", since = "1.26.0")]
1908// impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
1909
1910// TODO From implementations
1911// // #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
1912// impl<K, T: Ord> From<Vec<T>> for BinaryHeap<K, T> {
1913// /// Converts a `Vec<T>` into a `BinaryHeap<K, T>`.
1914// ///
1915// /// This conversion happens in-place, and has *O*(*n*) time complexity.
1916// fn from(vec: Vec<T>) -> Self {
1917// BinaryHeap::from_vec(vec)
1918// }
1919// }
1920
1921// impl<K, T: Ord, const N: usize> From<[T; N]> for BinaryHeap<K, T> {
1922// /// ```
1923// /// use mut_binary_heap::BinaryHeap;
1924// ///
1925// /// let mut h1 = BinaryHeap::from([1, 4, 2, 3]);
1926// /// let mut h2: BinaryHeap<_> = [1, 4, 2, 3].into();
1927// /// while let Some((a, b)) = h1.pop().zip(h2.pop()) {
1928// /// assert_eq!(a, b);
1929// /// }
1930// /// ```
1931// fn from(arr: [T; N]) -> Self {
1932// Self::from_iter(arr)
1933// }
1934// }
1935
1936// impl<K, T, C> From<BinaryHeap<K, T, C>> for Vec<T> {
1937// /// Converts a `BinaryHeap<K, T>` into a `Vec<T>`.
1938// ///
1939// /// This conversion requires no data movement or allocation, and has
1940// /// constant time complexity.
1941// fn from(heap: BinaryHeap<K, T, C>) -> Vec<T> {
1942// heap.data
1943// }
1944// }
1945
1946// #[stable(feature = "rust1", since = "1.0.0")]
1947// impl<K: Hash + Eq + Clone, T: Ord> FromIterator<(K, T)> for BinaryHeap<K, T> {
1948// fn from_iter<I: IntoIterator<Item = (K, T)>>(iter: I) -> Self {
1949// let iter = iter.into_iter();
1950// let size_hint = iter.size_hint().0;
1951
1952// let mut heap = BinaryHeap::with_capacity(size_hint);
1953
1954// for (key, value) in iter {
1955// heap.data.push((key.clone(), value));
1956// heap.keys.insert(key, heap.data.len() - 1);
1957// }
1958// heap.rebuild();
1959// heap
1960// }
1961// }
1962
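/// Builds a `BinaryHeap` from an iterator of key-value pairs: every pair is
/// pushed into the backing storage and the heap property is then restored with
/// a single `rebuild` pass. Keys are expected to be unique.
///
/// A small usage sketch, assuming the default max comparator used in the
/// crate-level examples:
///
/// ```
/// use mut_binary_heap::BinaryHeap;
///
/// let mut heap: BinaryHeap<i32, i32> = vec![(1, 1), (2, 5), (3, 2)].into_iter().collect();
/// assert_eq!(heap.pop_with_key(), Some((2, 5)));
/// assert_eq!(heap.pop_with_key(), Some((3, 2)));
/// assert_eq!(heap.pop_with_key(), Some((1, 1)));
/// assert_eq!(heap.pop_with_key(), None);
/// ```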
1963impl<K: Hash + Eq + Clone, T, C: Compare<T> + Default> FromIterator<(K, T)>
1964 for BinaryHeap<K, T, C>
1965{
1966 fn from_iter<I: IntoIterator<Item = (K, T)>>(iter: I) -> Self {
1967 let iter = iter.into_iter();
1968 let size_hint = iter.size_hint().0;
1969
1970 let mut heap = BinaryHeap::with_capacity(size_hint);
1971
1972 for (key, value) in iter {
1973 heap.data.push((key.clone(), value));
            let previous_index = heap.keys.insert(key, heap.data.len() - 1);
            // Keys must be unique; in debug builds, flag accidental duplicates.
            debug_assert!(
                previous_index.is_none(),
                "Tried to insert the same key multiple times (previous index: {:?})",
                previous_index
            );
1984 }
1985
1986 heap.rebuild();
1987 heap
1988 }
1989}
1990
1991impl<K, T, C> IntoIterator for BinaryHeap<K, T, C> {
1992 type Item = (K, T);
1993 type IntoIter = IntoIter<K, T>;
1994
1995 /// Creates a consuming iterator, that is, one that moves each key-value pair
1996 /// out of the binary heap in arbitrary order. The binary heap cannot be used
1997 /// after calling this.
1998 ///
1999 /// # Examples
2000 ///
2001 /// Basic usage:
2002 ///
2003 /// ```
2004 /// use mut_binary_heap::BinaryHeap;
2005 /// let heap = BinaryHeap::<_,_>::from([1, 2, 3, 4].iter(), |v| v.clone());
2006 ///
2007 /// // Print 1, 2, 3, 4 in arbitrary order
2008 /// for x in heap.into_iter() {
2009 /// // x has type (i32, i32), not (&i32, &i32)
2010 /// println!("key {}, value {}", x.0, x.1);
2011 /// }
2012 /// ```
2013 fn into_iter(self) -> IntoIter<K, T> {
2014 IntoIter {
2015 iter: self.data.into_iter(),
2016 }
2017 }
2018}
2019
2020// TODO implement Debug for Iterator types
2021// TODO implement FusedIterator for Iterator types
2022
/// An owning iterator over the key-value pairs of a `BinaryHeap`.
2024///
2025/// This `struct` is created by [`BinaryHeap::into_iter()`]
2026/// (provided by the [`IntoIterator`] trait). See its documentation for more.
2027///
2028/// [`IntoIterator`]: https://doc.rust-lang.org/stable/core/iter/trait.IntoIterator.html
2029// #[stable(feature = "rust1", since = "1.0.0")]
2030#[derive(Clone)]
2031pub struct IntoIter<K, T> {
2032 iter: vec::IntoIter<(K, T)>,
2033}
2034
2035impl<K, T> Iterator for IntoIter<K, T> {
2036 type Item = (K, T);
2037
2038 fn next(&mut self) -> Option<Self::Item> {
2039 self.iter.next()
2040 }
2041
2042 #[inline]
2043 fn size_hint(&self) -> (usize, Option<usize>) {
2044 self.iter.size_hint()
2045 }
2046}
2047
2048#[derive(Clone)]
2049pub struct IntoValues<K, V> {
2050 iter: vec::IntoIter<(K, V)>,
2051}
2052
2053impl<K, V> Iterator for IntoValues<K, V> {
2054 type Item = V;
2055
2056 fn next(&mut self) -> Option<Self::Item> {
2057 self.iter.next().map(|kv| kv.1)
2058 }
2059
2060 #[inline]
2061 fn size_hint(&self) -> (usize, Option<usize>) {
2062 self.iter.size_hint()
2063 }
2064}
2065
2066#[derive(Clone)]
2067pub struct IntoKeys<K, V> {
2068 iter: vec::IntoIter<(K, V)>,
2069}
2070
2071impl<K, V> Iterator for IntoKeys<K, V> {
2072 type Item = K;
2073
2074 fn next(&mut self) -> Option<Self::Item> {
2075 self.iter.next().map(|kv| kv.0)
2076 }
2077
2078 #[inline]
2079 fn size_hint(&self) -> (usize, Option<usize>) {
2080 self.iter.size_hint()
2081 }
2082}
2083
2084#[derive(Clone)]
2085pub struct Iter<'a, K, T> {
2086 iter: std::slice::Iter<'a, (K, T)>,
2087}
2088
2089impl<'a, K, T> Iterator for Iter<'a, K, T> {
2090 type Item = (&'a K, &'a T);
2091
2092 fn next(&mut self) -> Option<Self::Item> {
2093 self.iter.next().map(|kv| (&kv.0, &kv.1))
2094 }
2095
2096 #[inline]
2097 fn size_hint(&self) -> (usize, Option<usize>) {
2098 self.iter.size_hint()
2099 }
2100
2101 #[inline]
2102 fn last(self) -> Option<Self::Item>
2103 where
2104 Self: Sized,
2105 {
2106 self.iter.last().map(|kv| (&kv.0, &kv.1))
2107 }
2108}
2109
2110impl<'a, K, T> DoubleEndedIterator for Iter<'a, K, T> {
2111 fn next_back(&mut self) -> Option<Self::Item> {
2112 self.iter.next_back().map(|kv| (&kv.0, &kv.1))
2113 }
2114}
2115
2116#[derive(Clone)]
2117pub struct IterValues<'a, K, T> {
2118 iter: std::slice::Iter<'a, (K, T)>,
2119}
2120
2121impl<'a, K, T> Iterator for IterValues<'a, K, T> {
2122 type Item = &'a T;
2123
2124 fn next(&mut self) -> Option<Self::Item> {
2125 self.iter.next().map(|kv| &kv.1)
2126 }
2127
2128 #[inline]
2129 fn size_hint(&self) -> (usize, Option<usize>) {
2130 self.iter.size_hint()
2131 }
2132
2133 #[inline]
2134 fn last(self) -> Option<Self::Item>
2135 where
2136 Self: Sized,
2137 {
        self.iter.last().map(|kv| &kv.1)
2139 }
2140}
2141
2142#[derive(Clone)]
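/// A borrowing iterator over the keys of a `BinaryHeap`, in arbitrary order.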
2143pub struct IterKeys<'a, K, T> {
2144 iter: std::slice::Iter<'a, (K, T)>,
2145}
2146
2147impl<'a, K, T> Iterator for IterKeys<'a, K, T> {
2148 type Item = &'a K;
2149
2150 fn next(&mut self) -> Option<Self::Item> {
2151 self.iter.next().map(|kv| &kv.0)
2152 }
2153
2154 #[inline]
2155 fn size_hint(&self) -> (usize, Option<usize>) {
2156 self.iter.size_hint()
2157 }
2158
2159 #[inline]
2160 fn last(self) -> Option<Self::Item>
2161 where
2162 Self: Sized,
2163 {
        self.iter.last().map(|kv| &kv.0)
2165 }
2166}
2167
2168impl<'a, K, T, C> IntoIterator for &'a BinaryHeap<K, T, C> {
2169 type Item = (&'a K, &'a T);
2170 type IntoIter = Iter<'a, K, T>;
2171
2172 fn into_iter(self) -> Self::IntoIter {
2173 self.iter()
2174 }
2175}
2176
/// An iterator that yields mutable references to the values in the heap.
/// The heap is rebuilt after this iterator is dropped.
// NOTE: this cannot implement `Clone`, or we would invalidate the uniqueness
// guarantee of the mutable references.
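///
/// # Examples
///
/// A minimal sketch, relying only on `push`/`pop` and the `IntoIterator`
/// implementation for `&mut BinaryHeap` defined below:
///
/// ```
/// use mut_binary_heap::BinaryHeap;
///
/// let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
/// heap.push(1, 1);
/// heap.push(2, 5);
///
/// // Mutate every value; the heap reorders itself once the iterator is dropped.
/// for (_key, value) in &mut heap {
///     *value = -*value;
/// }
/// assert_eq!(heap.pop(), Some(-1));
/// assert_eq!(heap.pop(), Some(-5));
/// assert_eq!(heap.pop(), None);
/// ```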
2180pub struct MutIter<'a, K: Hash + Eq, T, C: Compare<T>> {
2181 heap: *mut BinaryHeap<K, T, C>,
2182 iter: std::slice::Iter<'a, (K, T)>,
2183}
2184
2185impl<'a, K: Hash + Eq, T, C: Compare<T>> IntoIterator for &'a mut BinaryHeap<K, T, C> {
2186 type Item = (&'a K, &'a mut T);
2187 type IntoIter = MutIter<'a, K, T, C>;
2188
2189 fn into_iter(self) -> Self::IntoIter {
2190 MutIter {
2191 heap: self,
2192 iter: self.data.iter(),
2193 }
2194 }
2195}
2196
2197impl<'a, K: Hash + Eq, T, C: Compare<T>> Iterator for MutIter<'a, K, T, C> {
2198 type Item = (&'a K, &'a mut T);
2199
2200 fn next(&mut self) -> Option<Self::Item> {
2201 if let Some(kv) = self.iter.next() {
2202 let key = &kv.0;
2203 let ptr: *const T = &kv.1 as *const T;
2204 let mut_ptr: *mut T = ptr as *mut T;
            // SAFETY: We have mutable access to the heap, because a `MutIter`
            // can only be constructed from a mutable reference to the heap.
            //
            // We hand out at most one mutable reference per element of the
            // heap, so this reference is unique and has not been shared.
2211 let value = unsafe { &mut *mut_ptr };
2212 Some((key, value))
2213 } else {
2214 None
2215 }
2216 }
2217
2218 #[inline]
2219 fn size_hint(&self) -> (usize, Option<usize>) {
2220 self.iter.size_hint()
2221 }
2222}
2223
2224impl<'a, K: Hash + Eq, T, C: Compare<T>> Drop for MutIter<'a, K, T, C> {
2225 fn drop(&mut self) {
2226 // SAFETY: MutIter was constructed from a valid mut reference
2227 let heap = unsafe { &mut *self.heap };
2228 heap.rebuild();
2229 }
2230}
2231
2232// #[stable(feature = "rust1", since = "1.0.0")]
2233// TODO heap extension helpers
2234// impl<K, T, C: Compare<T>> Extend<T> for BinaryHeap<K, T, C> {
2235// #[inline]
2236// fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
2237// // <Self as SpecExtend<I>>::spec_extend(self, iter);
2238// self.extend_desugared(iter);
2239// }
2240// }
2241
2242// impl<K, T, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<K, T> {
2243// default fn spec_extend(&mut self, iter: I) {
2244// self.extend_desugared(iter.into_iter());
2245// }
2246// }
2247
2248// impl<K, T> SpecExtend<BinaryHeap<K, T>> for BinaryHeap<K, T> {
2249// fn spec_extend(&mut self, ref mut other: BinaryHeap<K, T>) {
2250// self.append(other);
2251// }
2252// }
2253
2254// impl<K, T, C: Compare<T>> BinaryHeap<K, T, C> {
2255// fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
2256// let iterator = iter.into_iter();
2257// let (lower, _) = iterator.size_hint();
2258
2259// self.reserve(lower);
2260
2261// iterator.for_each(move |elem| self.push(elem));
2262// }
2263// }
2264
2265// // #[stable(feature = "extend_ref", since = "1.2.0")]
2266// impl<'a, K, T: 'a + Copy, C: Compare<T>> Extend<&'a T> for BinaryHeap<K, T, C> {
2267// fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
2268// self.extend(iter.into_iter().cloned());
2269// }
2270// }
2271
2272// #[unstable(feature = "collection_placement",
2273// reason = "placement protocol is subject to change",
2274// issue = "30172")]
2275// pub struct BinaryHeapPlace<'a, T: 'a>
2276// where T: Clone {
2277// heap: *mut BinaryHeap<K, T>,
2278// place: vec::PlaceBack<'a, T>,
2279// }
2280
2281// #[unstable(feature = "collection_placement",
2282// reason = "placement protocol is subject to change",
2283// issue = "30172")]
2284// impl<'a, T: Clone + Ord + fmt::Debug> fmt::Debug for BinaryHeapPlace<'a, T> {
2285// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
2286// f.debug_tuple("BinaryHeapPlace")
2287// .field(&self.place)
2288// .finish()
2289// }
2290// }
2291
2292// #[unstable(feature = "collection_placement",
2293// reason = "placement protocol is subject to change",
2294// issue = "30172")]
2295// impl<'a, T: 'a> Placer<T> for &'a mut BinaryHeap<K, T>
2296// where T: Clone + Ord {
2297// type Place = BinaryHeapPlace<'a, T>;
2298
2299// fn make_place(self) -> Self::Place {
2300// let ptr = self as *mut BinaryHeap<K, T>;
2301// let place = Placer::make_place(self.data.place_back());
2302// BinaryHeapPlace {
2303// heap: ptr,
2304// place,
2305// }
2306// }
2307// }
2308
2309// #[unstable(feature = "collection_placement",
2310// reason = "placement protocol is subject to change",
2311// issue = "30172")]
2312// unsafe impl<'a, T> Place<T> for BinaryHeapPlace<'a, T>
2313// where T: Clone + Ord {
2314// fn pointer(&mut self) -> *mut T {
2315// self.place.pointer()
2316// }
2317// }
2318
2319// #[unstable(feature = "collection_placement",
2320// reason = "placement protocol is subject to change",
2321// issue = "30172")]
2322// impl<'a, T> InPlace<T> for BinaryHeapPlace<'a, T>
2323// where T: Clone + Ord {
2324// type Owner = &'a T;
2325
2326// unsafe fn finalize(self) -> &'a T {
2327// self.place.finalize();
2328
2329// let heap: &mut BinaryHeap<K, T> = &mut *self.heap;
2330// let len = heap.len();
2331// let i = heap.sift_up(0, len - 1);
2332// heap.data.get_unchecked(i)
2333// }
2334// }
2335
2336#[cfg(test)]
2337mod test {
2338 use crate::BinaryHeap;
2339 use std::collections::HashMap;
2340 use std::hash::Hash;
2341
2342 fn is_normal<T: Send + Unpin>() {}
2343
2344 #[test]
2345 fn check_is_send_unpin() {
        // If this compiles, `BinaryHeap` is `Send` and `Unpin`; no runtime assertion is needed.
        is_normal::<BinaryHeap<i64, i64>>();
2348 }
2349
2350 fn assert_key_map_valid<K: Hash + Eq + Clone, T, C>(bh: &BinaryHeap<K, T, C>) {
2351 let mut expected_keys = HashMap::new();
2352 for (i, kv) in bh.data.iter().enumerate() {
2353 expected_keys.insert(kv.0.clone(), i);
2354 }
2355
        for (key, &index) in &expected_keys {
            assert!(bh.keys.contains_key(key));
            assert_eq!(bh.keys[key], index);
        }
2362 assert_eq!(bh.keys.len(), expected_keys.len());
2363 }
2364
2365 #[test]
2366 fn valid_key_map() {
2367 // TODO why do I need to specify the type here? The compiler should be able to infer this
2368 let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
2369
2370 assert_key_map_valid(&heap);
2371
2372 heap.push(0, 0);
2373
2374 assert_key_map_valid(&heap);
2375
2376 heap.push(1, 10);
2377 heap.push(2, 15);
2378 heap.push(3, 5);
2379 heap.push(4, 8);
2380
2381 assert_key_map_valid(&heap);
2382
2383 assert_eq!(heap.pop_with_key(), Some((2, 15)));
2384
2385 assert_key_map_valid(&heap);
2386
2387 assert_eq!(heap.pop_with_key(), Some((1, 10)));
2388 assert_eq!(heap.pop_with_key(), Some((4, 8)));
2389
2390 heap.push(5, 2);
2391
2392 assert_key_map_valid(&heap);
2393
2394 assert_eq!(heap.pop_with_key(), Some((3, 5)));
2395 assert_eq!(heap.pop_with_key(), Some((5, 2)));
2396 assert_eq!(heap.pop_with_key(), Some((0, 0)));
2397
2398 assert_key_map_valid(&heap);
2399
2400 assert_eq!(heap.pop_with_key(), None);
2401
2402 assert_key_map_valid(&heap);
2403 }
2404
2405 #[test]
2406 fn valid_key_map_after_clear() {
2407 // TODO why do I need to specify the type here? The compiler should be able to infer this
2408 let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
2409
2410 assert_key_map_valid(&heap);
2411
2412 heap.push(0, 0);
2413
2414 assert_key_map_valid(&heap);
2415
2416 heap.push(1, 10);
2417 heap.push(2, 15);
2418 heap.push(3, 5);
2419 heap.push(4, 8);
2420
2421 assert_key_map_valid(&heap);
2422
2423 heap.clear();
2424
2425 assert_key_map_valid(&heap);
2426 assert_eq!(heap.len(), 0);
2427 }
2428}