// small_collections/heap.rs
1//! Priority queue that lives on the stack and spills to the heap.
2//!
3//! Provides [`SmallBinaryHeap`] — backed by `heapless::binary_heap::BinaryHeap` on the stack
4//! and `std::collections::BinaryHeap` on the heap.  Supports both max-heap ([`Max`]) and
5//! min-heap ([`Min`]) orderings via the [`HeapKind`] trait, which wraps `Min` elements in
6//! `core::cmp::Reverse` to invert the ordering for `std::collections::BinaryHeap`.
7//!
8//! [`AnyHeap`] is an object-safe trait abstracting over both backends.
9
10use core::cmp::Reverse;
11use core::mem::ManuallyDrop;
12use core::ops::{Deref, DerefMut};
13use core::ptr;
14use std::collections::BinaryHeap;
15use std::fmt::{Debug, Formatter, Result};
16
/// A trait for abstraction over different priority queue types (Stack, Heap, Small).
///
/// Object-safe (no generic methods, no `Self`-returning methods), so it can be
/// used as `&mut dyn AnyHeap<T>` to treat `std::collections::BinaryHeap` and
/// [`SmallBinaryHeap`] uniformly.
pub trait AnyHeap<T> {
    /// Returns the number of elements in the heap.
    fn len(&self) -> usize;
    /// Returns `true` if the heap is empty.
    ///
    /// Default implementation derived from [`AnyHeap::len`].
    fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Pushes an item onto the heap.
    fn push(&mut self, item: T);
    /// Removes the greatest (or smallest, depending on the heap type) item from the heap and returns it.
    fn pop(&mut self) -> Option<T>;
    /// Returns a reference to the greatest (or smallest) item in the heap.
    fn peek(&self) -> Option<&T>;
    /// Clears the heap, removing all values.
    fn clear(&mut self);
}
34
35impl<T: Ord> AnyHeap<T> for BinaryHeap<T> {
36    fn len(&self) -> usize {
37        self.len()
38    }
39    fn push(&mut self, item: T) {
40        self.push(item);
41    }
42    fn pop(&mut self) -> Option<T> {
43        self.pop()
44    }
45    fn peek(&self) -> Option<&T> {
46        self.peek()
47    }
48    fn clear(&mut self) {
49        self.clear();
50    }
51}
52
53use heapless::binary_heap::BinaryHeap as HeaplessHeap;
54pub use heapless::binary_heap::{Kind, Max, Min};
55
/// Internal trait to bridge heapless::Kind and std::collections::BinaryHeap.
///
/// `heapless` supports both orderings natively via [`Kind`], but the std
/// `BinaryHeap` is always a max-heap; this trait maps each item to the
/// representation the std backend stores: `T` itself for [`Max`],
/// `core::cmp::Reverse<T>` for [`Min`].
///
/// NOTE(review): `SmallPeekMut`'s `DerefMut` pointer-casts
/// `&mut Self::Element` to `&mut T`, so any implementation must keep
/// `Element` layout-compatible with `T` (identity, or a
/// `#[repr(transparent)]` wrapper) — confirm before adding new impls, or
/// consider sealing this trait.
pub trait HeapKind<T: Ord>: Kind {
    /// The actual element type stored inside the heap format.
    type Element: Ord;
    /// Wraps an item to its `Element` representation.
    fn wrap(item: T) -> Self::Element;
    /// Unwraps an `Element` back to the raw item.
    fn unwrap(item: Self::Element) -> T;
    /// Wraps a reference to an item.
    fn wrap_ref(item: &T) -> &Self::Element;
    /// Unwraps a reference to an `Element`.
    fn unwrap_ref(item: &Self::Element) -> &T;
}
69
70impl<T: Ord> HeapKind<T> for Max {
71    type Element = T;
72    #[inline]
73    fn wrap(item: T) -> T {
74        item
75    }
76    #[inline]
77    fn unwrap(item: T) -> T {
78        item
79    }
80    #[inline]
81    fn wrap_ref(item: &T) -> &T {
82        item
83    }
84    #[inline]
85    fn unwrap_ref(item: &T) -> &T {
86        item
87    }
88}
89
/// `Min` stores items as `Reverse<T>`, inverting the ordering so the
/// max-oriented std heap yields the smallest element first.
impl<T: Ord> HeapKind<T> for Min {
    type Element = Reverse<T>;
    #[inline]
    fn wrap(item: T) -> Reverse<T> {
        Reverse(item)
    }
    #[inline]
    fn unwrap(item: Reverse<T>) -> T {
        item.0
    }
    #[inline]
    fn wrap_ref(item: &T) -> &Reverse<T> {
        // SAFETY: `core::cmp::Reverse` is `#[repr(transparent)]` over its
        // single field, so `&T` and `&Reverse<T>` share layout, alignment,
        // and validity; the cast only reinterprets the reference type.
        unsafe { &*(item as *const T as *const Reverse<T>) }
    }
    #[inline]
    fn unwrap_ref(item: &Reverse<T>) -> &T {
        &item.0
    }
}
110
/// A priority queue that lives on the stack for `N` items, then spills to the heap.
///
/// # Behavior
/// * **Heap Kind:** Supports both `Max` (default) and `Min` heap behavior.
/// * **Spill:** Occurs automatically when pushing the (N+1)th item.
/// * **Complexity:** Push and Pop are O(log n). Spill is O(n).
/// * A spill is one-way: storage never migrates back to the stack, even
///   after `clear` (only `retain`, which rebuilds from scratch, restarts on
///   the stack).
///
/// # Safety Invariants
/// * `on_stack` tag determines which side of the `HeapData` union is active.
/// * Elements are stored in a binary heap order (array representation).
pub struct SmallBinaryHeap<T: Ord, const N: usize, K: Kind + HeapKind<T> = Max> {
    // Tag for `data`: `true` => `data.stack` is active, `false` => `data.heap`.
    on_stack: bool,
    data: HeapData<T, N, K>,
}
125
126impl<T: Ord, const N: usize, K: Kind + HeapKind<T>> AnyHeap<T> for SmallBinaryHeap<T, N, K> {
127    fn len(&self) -> usize {
128        self.len()
129    }
130    fn push(&mut self, item: T) {
131        self.push(item);
132    }
133    fn pop(&mut self) -> Option<T> {
134        self.pop()
135    }
136    fn peek(&self) -> Option<&T> {
137        self.peek()
138    }
139    fn clear(&mut self) {
140        self.clear();
141    }
142}
143
/// Internal storage for `SmallBinaryHeap`.
///
/// We use `ManuallyDrop` because the compiler cannot know which field is active
/// and therefore cannot automatically drop the correct one; `SmallBinaryHeap`'s
/// `Drop` impl consults the `on_stack` tag and drops the active field by hand.
union HeapData<T: Ord, const N: usize, K: Kind + HeapKind<T>> {
    /// Active while `on_stack == true`: fixed-capacity heap stored inline.
    stack: ManuallyDrop<HeaplessHeap<T, K, N>>,
    /// Active while `on_stack == false`: growable heap of wrapped elements.
    heap: ManuallyDrop<BinaryHeap<K::Element>>,
}
152
153impl<T, const N: usize, K> SmallBinaryHeap<T, N, K>
154where
155    T: Ord,
156    K: Kind + HeapKind<T>,
157{
158    /// The maximum allowed stack size in bytes (16 KB).
159    pub const MAX_STACK_SIZE: usize = 16 * 1024;
160
161    /// Creates a new empty binary heap.
162    pub fn new() -> Self {
163        const {
164            assert!(
165                std::mem::size_of::<Self>() <= SmallBinaryHeap::<T, N, K>::MAX_STACK_SIZE,
166                "SmallBinaryHeap is too large! Reduce N."
167            );
168        }
169
170        Self {
171            on_stack: true,
172            data: HeapData {
173                stack: ManuallyDrop::new(HeaplessHeap::new()),
174            },
175        }
176    }
177
178    /// Creates a new empty binary heap with a specific initial capacity on the heap.
179    /// This immediately forces the storage to be on the heap, bypassing the stack optimization.
180    pub fn with_capacity(capacity: usize) -> Self {
181        if capacity <= N {
182            Self::new()
183        } else {
184            Self {
185                on_stack: false,
186                data: HeapData {
187                    heap: ManuallyDrop::new(BinaryHeap::with_capacity(capacity)),
188                },
189            }
190        }
191    }
192
193    // --- Inspection ---
194
195    /// Returns `true` if the heap is currently storing data on the stack.
196    #[inline]
197    pub fn is_on_stack(&self) -> bool {
198        self.on_stack
199    }
200
201    /// Returns the number of elements in the heap.
202    pub fn len(&self) -> usize {
203        unsafe {
204            if self.on_stack {
205                self.data.stack.len()
206            } else {
207                self.data.heap.len()
208            }
209        }
210    }
211
212    /// Returns `true` if the heap contains no elements.
213    pub fn is_empty(&self) -> bool {
214        self.len() == 0
215    }
216
217    /// Returns the total capacity of the heap.
218    /// If on stack, returns N. If on heap, returns the vector capacity.
219    pub fn capacity(&self) -> usize {
220        unsafe {
221            if self.on_stack {
222                N
223            } else {
224                self.data.heap.capacity()
225            }
226        }
227    }
228
229    // --- Modification ---
230
231    /// Reserves capacity for at least `additional` more elements to be inserted.
232    /// May trigger a spill to heap if current stack capacity is insufficient.
233    pub fn reserve(&mut self, additional: usize) {
234        unsafe {
235            if self.on_stack {
236                if self.data.stack.len() + additional > N {
237                    self.spill_to_heap();
238                    (*self.data.heap).reserve(additional);
239                }
240            } else {
241                (*self.data.heap).reserve(additional);
242            }
243        }
244    }
245
246    /// Discards as much additional capacity as possible.
247    pub fn shrink_to_fit(&mut self) {
248        if !self.on_stack {
249            unsafe { (*self.data.heap).shrink_to_fit() }
250        }
251    }
252
253    /// Returns a reference to the greatest item.
254    pub fn peek(&self) -> Option<&T> {
255        unsafe {
256            if self.on_stack {
257                self.data.stack.peek()
258            } else {
259                self.data.heap.peek().map(K::unwrap_ref)
260            }
261        }
262    }
263
264    /// Returns a mutable reference to the greatest item.
265    pub fn peek_mut(&mut self) -> Option<SmallPeekMut<'_, T, N, K>> {
266        unsafe {
267            if self.on_stack {
268                (*self.data.stack).peek_mut().map(SmallPeekMut::Stack)
269            } else {
270                (*self.data.heap).peek_mut().map(SmallPeekMut::Heap)
271            }
272        }
273    }
274
275    /// Pushes an item onto the binary heap.
276    ///
277    /// If the stack capacity `N` is exceeded, this triggers a transparent spill to the heap.
278    pub fn push(&mut self, item: T) {
279        unsafe {
280            if self.on_stack {
281                let stack_heap = &mut *self.data.stack;
282                if stack_heap.len() == N {
283                    self.spill_to_heap();
284                    // Fallthrough to heap push
285                } else {
286                    stack_heap.push(item).ok().unwrap();
287                    return;
288                }
289            }
290            (*self.data.heap).push(K::wrap(item));
291        }
292    }
293
294    /// Removes the greatest item from the binary heap and returns it, or `None` if it is empty.
295    pub fn pop(&mut self) -> Option<T> {
296        unsafe {
297            if self.on_stack {
298                (*self.data.stack).pop()
299            } else {
300                (*self.data.heap).pop().map(K::unwrap)
301            }
302        }
303    }
304
305    /// Drops all items from the binary heap.
306    pub fn clear(&mut self) {
307        unsafe {
308            if self.on_stack {
309                (*self.data.stack).clear();
310            } else {
311                (*self.data.heap).clear();
312            }
313        }
314    }
315
316    /// Moves all the elements of `other` into `self`, leaving `other` empty.
317    pub fn append(&mut self, other: &mut Self) {
318        if other.is_empty() {
319            return;
320        }
321
322        self.reserve(other.len());
323
324        match (self.on_stack, other.on_stack) {
325            (false, false) => unsafe {
326                // Both on heap, but we can't easily merge two BinaryHeaps of different types
327                // unless we drain.
328                (*self.data.heap).extend((*other.data.heap).drain());
329            },
330            (false, true) => {
331                while let Some(item) = other.pop() {
332                    unsafe {
333                        (*self.data.heap).push(K::wrap(item));
334                    }
335                }
336            }
337            (true, _) => {
338                // If we are on stack, we just reserved, so we know we fit or spilled.
339                while let Some(item) = other.pop() {
340                    self.push(item);
341                }
342            }
343        }
344    }
345
346    /// Retains only the elements specified by the predicate.
347    /// Rebuilds the heap, complexity O(n).
348    pub fn retain<F>(&mut self, mut f: F)
349    where
350        F: FnMut(&T) -> bool,
351    {
352        let items = self.drain().collect::<Vec<_>>();
353        self.clear(); // Reset state
354        for item in items {
355            if f(&item) {
356                self.push(item);
357            }
358        }
359    }
360
361    // --- Consumption ---
362
363    /// Consumes the heap and returns the underlying vector in arbitrary (heap) order.
364    pub fn into_vec(self) -> Vec<T> {
365        let this = ManuallyDrop::new(self);
366        unsafe {
367            if this.on_stack {
368                ptr::read(&*this.data.stack)
369                    .into_vec()
370                    .into_iter()
371                    .collect()
372            } else {
373                ptr::read(&*this.data.heap)
374                    .into_vec()
375                    .into_iter()
376                    .map(K::unwrap)
377                    .collect()
378            }
379        }
380    }
381
382    /// Consumes the heap and returns a vector sorted from greatest to lowest.
383    pub fn into_sorted_vec(mut self) -> Vec<T> {
384        let mut vec = Vec::with_capacity(self.len());
385        while let Some(item) = self.pop() {
386            vec.push(item);
387        }
388        vec
389    }
390
391    // --- Internals ---
392
393    #[inline(never)]
394    unsafe fn spill_to_heap(&mut self) {
395        unsafe {
396            let stack_heap = ptr::read(&*self.data.stack);
397            let std_heap = BinaryHeap::from_iter(stack_heap.into_vec().into_iter().map(K::wrap));
398            ptr::write(&mut self.data.heap, ManuallyDrop::new(std_heap));
399            self.on_stack = false;
400        }
401    }
402
403    /// Helper for append/retain: drains all elements.
404    fn drain(&mut self) -> IntoIter<T, N, K> {
405        let old_self = std::mem::replace(self, Self::new());
406        old_self.into_iter()
407    }
408}
409
410// --- Wrapper Types ---
411
/// A mutable representation of the top element of the heap.
///
/// While this guard is alive the top element may be freely mutated; the
/// wrapped backend `PeekMut` guards restore the heap property when dropped.
pub enum SmallPeekMut<'a, T, const N: usize, K: Kind + HeapKind<T>>
where
    T: Ord,
{
    /// Mutable representation of the top element on the stack heap.
    Stack(heapless::binary_heap::PeekMut<'a, T, K, N>),
    /// Mutable representation of the top element on the std heap.
    Heap(std::collections::binary_heap::PeekMut<'a, K::Element>),
}
422
423impl<'a, T, const N: usize, K> Deref for SmallPeekMut<'a, T, N, K>
424where
425    T: Ord,
426    K: Kind + HeapKind<T>,
427{
428    type Target = T;
429    fn deref(&self) -> &Self::Target {
430        match self {
431            SmallPeekMut::Stack(p) => p,
432            SmallPeekMut::Heap(p) => K::unwrap_ref(p),
433        }
434    }
435}
436
impl<'a, T, const N: usize, K> DerefMut for SmallPeekMut<'a, T, N, K>
where
    T: Ord,
    K: Kind + HeapKind<T>,
{
    /// Mutably borrows the top element. Safe to hand out because the
    /// underlying `PeekMut` guards re-heapify on drop.
    fn deref_mut(&mut self) -> &mut Self::Target {
        match self {
            SmallPeekMut::Stack(p) => p,
            SmallPeekMut::Heap(p) => {
                // Reach through std's `PeekMut<K::Element>` to get `&mut T`.
                let rev_mut: &mut K::Element = &mut **p;
                // SAFETY: for the two impls in this module, `K::Element` is
                // either `T` itself (`Max`) or `#[repr(transparent)]`
                // `Reverse<T>` (`Min`), so the cast preserves layout,
                // alignment, and validity, and the `&mut` provenance covers
                // the whole element.
                // NOTE(review): a third-party `HeapKind` impl with a
                // non-transparent `Element` would make this cast unsound —
                // consider sealing the trait or adding an `unwrap_mut` hook.
                let t_mut: &mut T = unsafe { &mut *(rev_mut as *mut K::Element as *mut T) };
                t_mut
            }
        }
    }
}
460
461impl<'a, T, const N: usize, K> SmallPeekMut<'a, T, N, K>
462where
463    T: Ord,
464    K: Kind + HeapKind<T>,
465{
466    /// Removes the peeked value from the heap and returns it.
467    pub fn pop(self) -> T {
468        match self {
469            SmallPeekMut::Stack(p) => heapless::binary_heap::PeekMut::pop(p),
470            SmallPeekMut::Heap(p) => K::unwrap(std::collections::binary_heap::PeekMut::pop(p)),
471        }
472    }
473}
474
475// --- Iterators ---
476
/// An owning iterator over the elements of a `SmallBinaryHeap`.
///
/// Yields items in pop order: descending for `Max`, ascending for `Min`.
pub struct IntoIter<T, const N: usize, K>
where
    T: Ord,
    K: Kind + HeapKind<T>,
{
    // The consumed heap; `next` simply pops it until empty.
    heap: SmallBinaryHeap<T, N, K>,
}
485
486impl<T, const N: usize, K> Iterator for IntoIter<T, N, K>
487where
488    T: Ord,
489    K: Kind + HeapKind<T>,
490{
491    type Item = T;
492    fn next(&mut self) -> Option<T> {
493        self.heap.pop()
494    }
495    fn size_hint(&self) -> (usize, Option<usize>) {
496        let len = self.heap.len();
497        (len, Some(len))
498    }
499}
500
impl<T, const N: usize, K> IntoIterator for SmallBinaryHeap<T, N, K>
where
    T: Ord,
    K: Kind + HeapKind<T>,
{
    type Item = T;
    type IntoIter = IntoIter<T, N, K>;
    /// Consumes the heap into an iterator that yields items in pop order.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter { heap: self }
    }
}
512
impl<'a, T, const N: usize, K> IntoIterator for &'a SmallBinaryHeap<T, N, K>
where
    T: Ord,
    K: Kind + HeapKind<T>,
{
    type Item = &'a T;
    type IntoIter = SmallHeapIter<'a, T, K>;

    /// Borrowing iteration in arbitrary (internal array) order; equivalent
    /// to [`SmallBinaryHeap::iter`].
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
525
/// An iterator over the elements of a `SmallBinaryHeap`.
///
/// Visits elements in arbitrary (internal array) order, not sorted order.
pub enum SmallHeapIter<'a, T, K: Kind + HeapKind<T>>
where
    T: Ord,
{
    /// Stack iterator.
    Stack(core::slice::Iter<'a, T>),
    /// Heap iterator.
    Heap(std::collections::binary_heap::Iter<'a, K::Element>),
}
536
537impl<'a, T, K> Iterator for SmallHeapIter<'a, T, K>
538where
539    T: Ord,
540    K: Kind + HeapKind<T>,
541{
542    type Item = &'a T;
543    fn next(&mut self) -> Option<Self::Item> {
544        match self {
545            SmallHeapIter::Stack(iter) => iter.next(),
546            SmallHeapIter::Heap(iter) => iter.next().map(K::unwrap_ref),
547        }
548    }
549}
550
impl<T, const N: usize, K> SmallBinaryHeap<T, N, K>
where
    T: Ord,
    K: Kind + HeapKind<T>,
{
    /// Returns an iterator visiting all values in the underlying vector, in arbitrary order.
    pub fn iter(&self) -> SmallHeapIter<'_, T, K> {
        // SAFETY: `on_stack` tags which union field is active.
        unsafe {
            if self.on_stack {
                SmallHeapIter::Stack(self.data.stack.iter())
            } else {
                SmallHeapIter::Heap(self.data.heap.iter())
            }
        }
    }
}
567
568// --- Trait Implementations ---
569
impl<T: Ord, const N: usize, K: Kind + HeapKind<T>> Drop for SmallBinaryHeap<T, N, K> {
    /// Drops whichever union field is active; the compiler cannot do this
    /// for a union, hence the `ManuallyDrop` wrappers and manual dispatch.
    fn drop(&mut self) {
        // SAFETY: `on_stack` tags the active field, and each field is
        // dropped at most once because `drop` runs exactly once.
        unsafe {
            if self.on_stack {
                ManuallyDrop::drop(&mut self.data.stack);
            } else {
                ManuallyDrop::drop(&mut self.data.heap);
            }
        }
    }
}
581
582impl<T: Ord, const N: usize, K: Kind + HeapKind<T>> Default for SmallBinaryHeap<T, N, K> {
583    fn default() -> Self {
584        Self::new()
585    }
586}
587
impl<T, const N: usize, K> Clone for SmallBinaryHeap<T, N, K>
where
    T: Ord + Clone,
    K: Kind + HeapKind<T>,
    K::Element: Clone,
{
    /// Clones only the active storage; the clone keeps the same stack/heap
    /// placement as the original.
    fn clone(&self) -> Self {
        // SAFETY: `on_stack` tags which union field is active, and the new
        // value is constructed with the matching tag.
        unsafe {
            if self.on_stack {
                Self {
                    on_stack: true,
                    data: HeapData {
                        stack: ManuallyDrop::new((*self.data.stack).clone()),
                    },
                }
            } else {
                Self {
                    on_stack: false,
                    data: HeapData {
                        heap: ManuallyDrop::new((*self.data.heap).clone()),
                    },
                }
            }
        }
    }
}
614
615impl<T, const N: usize, K> Debug for SmallBinaryHeap<T, N, K>
616where
617    T: Ord + Debug,
618    K: Kind + HeapKind<T>,
619{
620    fn fmt(&self, f: &mut Formatter<'_>) -> Result {
621        f.debug_list().entries(self.iter()).finish()
622    }
623}
624
625impl<T, const N: usize, K> Extend<T> for SmallBinaryHeap<T, N, K>
626where
627    T: Ord,
628    K: Kind + HeapKind<T>,
629{
630    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
631        let iter = iter.into_iter();
632        let (lower, _) = iter.size_hint();
633        self.reserve(lower);
634        for elem in iter {
635            self.push(elem);
636        }
637    }
638}
639
640impl<T, const N: usize, K> FromIterator<T> for SmallBinaryHeap<T, N, K>
641where
642    T: Ord,
643    K: Kind + HeapKind<T>,
644{
645    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
646        let mut heap = Self::new();
647        heap.extend(iter);
648        heap
649    }
650}
651
652// --- Tests ---
653
#[cfg(test)]
mod tests {
    // Tests exercise both storage backends: "stack" cases stay within N,
    // "spill"/"heap" cases push past N to force migration to the std heap.
    use super::*;

    #[test]
    fn test_heap_stack_ops_basic() {
        let mut heap: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::new();
        assert!(heap.is_empty());
        assert!(heap.is_on_stack());

        heap.push(1);
        heap.push(5);
        heap.push(2);

        assert_eq!(heap.len(), 3);
        assert_eq!(heap.peek(), Some(&5));

        // Pop order: max-heap yields values in descending order.
        assert_eq!(heap.pop(), Some(5));
        assert_eq!(heap.pop(), Some(2));
        assert_eq!(heap.pop(), Some(1));
        assert_eq!(heap.pop(), None);
    }

    #[test]
    fn test_heap_stack_min_heap_ops() {
        // Min kind: peek/pop yield the smallest element first.
        let mut heap: SmallBinaryHeap<i32, 4, Min> = SmallBinaryHeap::new();
        heap.push(10);
        heap.push(5);
        heap.push(15);

        assert_eq!(heap.peek(), Some(&5));
        assert_eq!(heap.pop(), Some(5));
        assert_eq!(heap.pop(), Some(10));
        assert_eq!(heap.pop(), Some(15));
    }

    #[test]
    fn test_heap_spill_trigger_max() {
        // The (N+1)th push (N = 2) must transparently spill to the heap.
        let mut heap: SmallBinaryHeap<i32, 2, Max> = SmallBinaryHeap::new();
        heap.push(10);
        heap.push(20);
        assert!(heap.is_on_stack());

        heap.push(30);
        assert!(!heap.is_on_stack());
        assert_eq!(heap.pop(), Some(30));
    }

    #[test]
    fn test_heap_spill_trigger_min() {
        // Min ordering must survive the spill (elements re-wrapped in Reverse).
        let mut heap: SmallBinaryHeap<i32, 2, Min> = SmallBinaryHeap::new();
        heap.push(30);
        heap.push(20);
        assert!(heap.is_on_stack());

        heap.push(10);
        assert!(!heap.is_on_stack());
        assert_eq!(heap.peek(), Some(&10));
        assert_eq!(heap.pop(), Some(10));
        assert_eq!(heap.pop(), Some(20));
        assert_eq!(heap.pop(), Some(30));
    }

    #[test]
    fn test_heap_any_storage_peek_mut_reorder() {
        // Mutating through peek_mut must re-heapify when the guard drops.
        let mut heap: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::new();
        heap.push(10);
        heap.push(50);
        heap.push(20);

        if let Some(mut top) = heap.peek_mut() {
            assert_eq!(*top, 50);
            *top = 5;
        }

        assert_eq!(heap.peek(), Some(&20));
        assert_eq!(heap.pop(), Some(20));
        assert_eq!(heap.pop(), Some(10));
        assert_eq!(heap.pop(), Some(5));
    }

    #[test]
    fn test_heap_any_storage_peek_mut_min_heap() {
        // Same re-heapify check for the Min kind (Reverse-wrapped elements).
        let mut heap: SmallBinaryHeap<i32, 4, Min> = SmallBinaryHeap::new();
        heap.push(10);
        heap.push(5);
        heap.push(20);

        assert_eq!(heap.peek(), Some(&5));

        if let Some(mut top) = heap.peek_mut() {
            assert_eq!(*top, 5);
            *top = 100;
        }

        assert_eq!(heap.peek(), Some(&10));
        assert_eq!(heap.pop(), Some(10));
        assert_eq!(heap.pop(), Some(20));
        assert_eq!(heap.pop(), Some(100));
    }

    #[test]
    fn test_heap_any_storage_append() {
        // 2 + 2 elements fit exactly in N = 4: no spill expected.
        let mut h1: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::new();
        h1.push(1);
        h1.push(10);

        let mut h2: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::new();
        h2.push(5);
        h2.push(15);

        h1.append(&mut h2);

        assert!(h2.is_empty());
        assert_eq!(h1.len(), 4);
        assert!(h1.is_on_stack());

        assert_eq!(h1.pop(), Some(15));
        assert_eq!(h1.pop(), Some(10));
    }

    #[test]
    fn test_heap_any_storage_retain() {
        // 10 elements with N = 8: starts spilled.
        let mut heap: SmallBinaryHeap<i32, 8> = SmallBinaryHeap::from_iter(0..10);
        assert!(!heap.is_on_stack());

        heap.retain(|&x| x % 2 == 0);

        // into_sorted_vec is greatest-first, hence the descending order.
        let vec = heap.into_sorted_vec();
        assert_eq!(vec, vec![8, 6, 4, 2, 0]);
    }

    #[test]
    fn test_heap_traits_clone() {
        let mut h1: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        h1.push(1);
        h1.push(2);

        // Clone taken before the spill stays on the stack.
        let h2 = h1.clone();
        h1.push(3);

        assert!(h1.len() == 3 && !h1.is_on_stack());
        assert!(h2.len() == 2 && h2.is_on_stack());

        // Clone taken after the spill keeps heap placement.
        let mut h3 = h1.clone();
        assert!(!h3.is_on_stack());
        assert_eq!(h3.pop(), Some(3));
    }

    #[test]
    fn test_heap_any_storage_with_capacity() {
        // capacity > N forces heap storage up front.
        let heap: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::with_capacity(10);
        assert!(!heap.is_on_stack());
        assert!(heap.capacity() >= 10);

        // capacity <= N falls back to the stack.
        let heap2: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::with_capacity(2);
        assert!(heap2.is_on_stack());
    }

    #[test]
    fn test_heap_any_storage_reserve_shrink() {
        let mut heap: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        heap.push(1);
        // 1 + 10 > N = 2: reserve must spill and then grow the heap.
        heap.reserve(10);
        assert!(!heap.is_on_stack());
        assert!(heap.capacity() >= 11);

        heap.shrink_to_fit();
        assert!(heap.capacity() >= 1);
    }

    #[test]
    fn test_heap_any_storage_clear() {
        let mut heap: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        heap.push(1);
        heap.push(2);
        heap.clear();
        assert!(heap.is_empty());
        assert!(heap.is_on_stack());

        heap.push(1);
        heap.push(2);
        heap.push(3); // Spill
        heap.clear();
        assert!(heap.is_empty());
        // clear() never migrates storage back to the stack.
        assert!(!heap.is_on_stack());
    }

    #[test]
    fn test_heap_traits_into_vec() {
        let mut heap: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::new();
        heap.push(1);
        heap.push(3);
        heap.push(2);

        // into_vec order is arbitrary: only membership is asserted.
        let v = heap.clone().into_vec();
        assert_eq!(v.len(), 3);
        assert!(v.contains(&1));
        assert!(v.contains(&2));
        assert!(v.contains(&3));

        let sv = heap.into_sorted_vec();
        assert_eq!(sv, vec![3, 2, 1]);
    }

    #[test]
    fn test_heap_traits_debug_default() {
        let heap: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::default();
        assert!(heap.is_empty());

        let mut heap = SmallBinaryHeap::<i32, 4>::new();
        heap.push(1);
        let debug = format!("{:?}", heap);
        assert!(debug.contains("1"));
    }

    #[test]
    fn test_heap_any_storage_drop_check() {
        use std::cell::RefCell;
        use std::rc::Rc;

        // Counts its own drops so we can detect leaks or double-frees in
        // the union-based Drop logic on both storage backends.
        struct Tracker(Rc<RefCell<i32>>);
        impl PartialEq for Tracker {
            fn eq(&self, _: &Self) -> bool {
                true
            }
        }
        impl Eq for Tracker {}
        impl PartialOrd for Tracker {
            fn partial_cmp(&self, _: &Self) -> Option<std::cmp::Ordering> {
                Some(std::cmp::Ordering::Equal)
            }
        }
        impl Ord for Tracker {
            fn cmp(&self, _: &Self) -> std::cmp::Ordering {
                std::cmp::Ordering::Equal
            }
        }
        impl Drop for Tracker {
            fn drop(&mut self) {
                *self.0.borrow_mut() += 1;
            }
        }

        // Stack-resident heap: exactly 2 drops, no more, no fewer.
        let counter = Rc::new(RefCell::new(0));
        {
            let mut heap: SmallBinaryHeap<Tracker, 2> = SmallBinaryHeap::new();
            heap.push(Tracker(counter.clone()));
            heap.push(Tracker(counter.clone()));
        }
        assert_eq!(*counter.borrow(), 2);

        // Spilled heap: the spill must not double-drop the moved elements.
        *counter.borrow_mut() = 0;
        {
            let mut heap: SmallBinaryHeap<Tracker, 2> = SmallBinaryHeap::new();
            heap.push(Tracker(counter.clone()));
            heap.push(Tracker(counter.clone()));
            heap.push(Tracker(counter.clone()));
        }
        assert_eq!(*counter.borrow(), 3);
    }

    #[test]
    fn test_heap_traits_min_heap_kind() {
        // Heap kind traits: Min wraps/unwraps through core::cmp::Reverse.
        let _: Reverse<i32> = <Min as HeapKind<i32>>::wrap(1);
        assert_eq!(<Min as HeapKind<i32>>::unwrap(Reverse(1)), 1);
        let val = 1;
        let _ = <Min as HeapKind<i32>>::wrap_ref(&val);
        let rev = Reverse(val);
        assert_eq!(<Min as HeapKind<i32>>::unwrap_ref(&rev), &1);
    }

    #[test]
    fn test_heap_any_storage_pop_empty() {
        let mut h: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        assert_eq!(h.pop(), None);
    }

    #[test]
    fn test_heap_any_storage_reserve_heap_side() {
        let mut h: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        h.push(1);
        h.push(2);
        h.push(3); // Spill
        h.reserve(10);
        assert!(!h.is_on_stack());
        assert!(h.capacity() >= 13);
    }

    #[test]
    fn test_heap_any_storage_peek_mut_heap() {
        // 3 elements with N = 2: exercises the std-heap PeekMut path.
        let mut h: SmallBinaryHeap<i32, 2> = vec![1, 2, 3].into_iter().collect();
        if let Some(mut top) = h.peek_mut() {
            assert_eq!(*top, 3);
            *top = 0;
        }
        assert_eq!(h.peek(), Some(&2));
    }

    #[test]
    fn test_heap_traits_into_iter_heap() {
        let h_into: SmallBinaryHeap<i32, 2> = vec![1, 2, 3].into_iter().collect();
        let mut it = h_into.into_iter();
        assert_eq!(it.size_hint(), (3, Some(3)));
        assert_eq!(it.next(), Some(3));
    }

    #[test]
    fn test_heap_traits_iter_any_storage() {
        // iter() visits the backing array: the root (greatest) comes first.
        let h_iter: SmallBinaryHeap<i32, 2> = vec![1, 2, 3].into_iter().collect();
        let mut it_ref = h_iter.iter();
        assert_eq!(it_ref.next(), Some(&3));

        let stack_h: SmallBinaryHeap<i32, 4> = vec![1, 2].into_iter().collect();
        let mut it_stack = stack_h.iter();
        assert_eq!(it_stack.next(), Some(&2));
    }

    #[test]
    fn test_heap_any_storage_append_complex() {
        // heap + heap: drained wholesale via extend.
        let mut h_heap1: SmallBinaryHeap<i32, 2> = vec![1, 2, 3].into_iter().collect();
        let mut h_heap2: SmallBinaryHeap<i32, 2> = vec![4, 5, 6].into_iter().collect();
        h_heap1.append(&mut h_heap2);
        assert_eq!(h_heap1.len(), 6);

        // heap + stack: other's raw items are wrapped and pushed.
        let mut h_heap3: SmallBinaryHeap<i32, 2> = vec![1, 2, 3].into_iter().collect();
        let mut h_stack1: SmallBinaryHeap<i32, 2> = vec![10].into_iter().collect();
        h_heap3.append(&mut h_stack1);
        assert_eq!(h_heap3.len(), 4);
    }

    #[test]
    fn test_heap_any_storage_peek_mut_pop() {
        // PeekMut::pop on the stack backend.
        let mut h_pop: SmallBinaryHeap<i32, 4> = vec![1, 2].into_iter().collect();
        if let Some(p) = h_pop.peek_mut() {
            assert_eq!(p.pop(), 2);
        }
        assert_eq!(h_pop.len(), 1);

        // PeekMut::pop on the spilled (std) backend.
        let mut h_pop_heap: SmallBinaryHeap<i32, 2> = vec![1, 2, 3].into_iter().collect();
        if let Some(p) = h_pop_heap.peek_mut() {
            assert_eq!(p.pop(), 3);
        }
        assert_eq!(h_pop_heap.len(), 2);
    }

    #[test]
    fn test_heap_any_storage_append_edge_cases() {
        let mut h: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::new();
        h.push(1);
        assert_eq!(h.capacity(), 4);

        // Appending an empty heap is a no-op (early return).
        let mut empty: SmallBinaryHeap<i32, 4> = SmallBinaryHeap::new();
        h.append(&mut empty); // append empty
        assert_eq!(h.len(), 1);
    }
}
1013
#[cfg(test)]
mod heap_coverage_tests {
    use super::*;
    use std::collections::BinaryHeap;

    #[test]
    fn test_any_heap_binary_heap_impl() {
        // Drive `std::collections::BinaryHeap` purely through the object-safe trait.
        let mut backing: BinaryHeap<i32> = BinaryHeap::new();
        let dyn_heap: &mut dyn AnyHeap<i32> = &mut backing;

        assert!(dyn_heap.is_empty());
        assert_eq!(dyn_heap.len(), 0);

        dyn_heap.push(10);
        dyn_heap.push(20);

        assert_eq!(dyn_heap.len(), 2);
        assert!(!dyn_heap.is_empty());
        assert_eq!(dyn_heap.peek(), Some(&20));
        assert_eq!(dyn_heap.pop(), Some(20));

        dyn_heap.clear();
        assert!(dyn_heap.is_empty());
    }

    #[test]
    fn test_any_heap_small_binary_heap_impl() {
        // Same trait-object exercise, this time over the small-collection backend.
        let mut backing: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        let dyn_heap: &mut dyn AnyHeap<i32> = &mut backing;

        assert!(dyn_heap.is_empty());
        assert_eq!(dyn_heap.len(), 0);

        dyn_heap.push(10);
        dyn_heap.push(20);

        assert_eq!(dyn_heap.len(), 2);
        assert_eq!(dyn_heap.peek(), Some(&20));
        assert_eq!(dyn_heap.pop(), Some(20));

        dyn_heap.clear();
        assert!(dyn_heap.is_empty());
    }

    #[test]
    fn test_heap_kind_explicit_calls() {
        // Max ordering is the identity mapping: values pass through untouched.
        assert_eq!(<Max as HeapKind<i32>>::wrap(5), 5);
        assert_eq!(<Max as HeapKind<i32>>::unwrap(5), 5);
        let plain = 5;
        assert_eq!(<Max as HeapKind<i32>>::wrap_ref(&plain), &5);
        assert_eq!(<Max as HeapKind<i32>>::unwrap_ref(&plain), &5);

        // Min ordering wraps values in `Reverse` to invert comparisons.
        // NOTE(review): `<Min as HeapKind<_>>::wrap_ref` is not exercised here,
        // unlike the symmetric Max case above — worth adding for full coverage.
        assert_eq!(<Min as HeapKind<i32>>::wrap(5), Reverse(5));
        assert_eq!(<Min as HeapKind<i32>>::unwrap(Reverse(5)), 5);
        let reversed = Reverse(5);
        assert_eq!(<Min as HeapKind<i32>>::unwrap_ref(&reversed), &5);
    }

    #[test]
    fn test_append_early_return() {
        // Appending an empty source takes the early-return path.
        let mut target: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        let mut empty_src: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();

        target.push(1);
        target.append(&mut empty_src);
        assert_eq!(target.len(), 1);
    }

    #[test]
    fn test_append_heap_to_heap() {
        // Both sides pre-spilled via an explicit capacity above the inline limit.
        let mut dst: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::with_capacity(5);
        dst.push(1);
        dst.push(2);

        let mut src: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::with_capacity(5);
        src.push(3);
        src.push(4);

        dst.append(&mut src);
        assert_eq!(dst.len(), 4);
        assert!(src.is_empty());
        assert_eq!(dst.pop(), Some(4));
    }

    #[test]
    fn test_append_stack_to_heap() {
        // Spilled destination, inline source.
        let mut dst: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::with_capacity(5);
        dst.push(1);

        let mut src: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        src.push(3);

        dst.append(&mut src);
        assert_eq!(dst.len(), 2);
        assert_eq!(dst.pop(), Some(3));
    }

    #[test]
    fn test_into_vec_heap() {
        // into_vec() on a spilled heap preserves the elements (order unspecified).
        let mut source: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::with_capacity(5);
        source.push(1);
        source.push(2);
        source.push(3);

        let elements = source.into_vec();
        assert_eq!(elements.len(), 3);
        for expected in [1, 2, 3] {
            assert!(elements.contains(&expected));
        }
    }

    #[test]
    fn test_into_iter_ref() {
        // `&SmallBinaryHeap: IntoIterator` yields references in descending order here.
        let mut heap: SmallBinaryHeap<i32, 2> = SmallBinaryHeap::new();
        heap.push(1);
        heap.push(2);

        let mut by_ref = (&heap).into_iter();
        assert_eq!(by_ref.next(), Some(&2));
        assert_eq!(by_ref.next(), Some(&1));
        assert_eq!(by_ref.next(), None);
    }
}