dotreds_binary_heap_plus/
binary_heap.rs

1#![allow(clippy::needless_doctest_main)]
2#![allow(missing_docs)]
3
4use compare::Compare;
5use core::fmt;
6use core::mem::{size_of, swap};
7use core::ptr;
8use std::cmp::Ordering;
9use std::iter::FromIterator;
10use std::ops::Deref;
11use std::ops::DerefMut;
12use std::slice;
13use std::vec;
14
/// A priority queue implemented with a binary heap, generic over the
/// comparator `C` (defaults to [`MaxComparator`], i.e. a max-heap).
///
/// Invariant: `cmp` reports every parent as not less than its children.
pub struct BinaryHeap<T, C = MaxComparator>
where
    C: Compare<T>,
{
    /// Backing storage laid out as an implicit binary tree.
    /// NOTE(review): this field is `pub`, so external code can mutate it and
    /// silently break the heap invariant — consider making it private.
    pub data: Vec<T>,
    /// Comparator that defines the heap order.
    cmp: C,
}
22
23#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
24pub struct MaxComparator;
25
26impl<T: Ord> Compare<T> for MaxComparator {
27    fn compare(&self, a: &T, b: &T) -> Ordering {
28        a.cmp(&b)
29    }
30}
31
32#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
33pub struct MinComparator;
34
35impl<T: Ord> Compare<T> for MinComparator {
36    fn compare(&self, a: &T, b: &T) -> Ordering {
37        b.cmp(&a)
38    }
39}
40
/// Comparator wrapping an arbitrary ordering closure `F: Fn(&T, &T) -> Ordering`.
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub struct FnComparator<F>(pub F);

impl<T, F> Compare<T> for FnComparator<F>
where
    F: Fn(&T, &T) -> Ordering,
{
    fn compare(&self, a: &T, b: &T) -> Ordering {
        self.0(a, b)
    }
}

/// Comparator ordering elements by a key extracted with `F: Fn(&T) -> K`,
/// compared via `K: Ord`.
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub struct KeyComparator<F>(pub F);

impl<K: Ord, T, F> Compare<T> for KeyComparator<F>
where
    F: Fn(&T) -> K,
{
    fn compare(&self, a: &T, b: &T) -> Ordering {
        // The key function runs for both sides on every comparison;
        // keys are not cached.
        self.0(a).cmp(&self.0(b))
    }
}
64
/// Guard wrapping a mutable reference to the greatest item on a
/// [`BinaryHeap`], created by [`BinaryHeap::peek_mut`].
///
/// If the value is mutably borrowed through the guard, dropping the guard
/// sifts the (possibly changed) root back down to restore the invariant.
pub struct PeekMut<'a, T: 'a, C: 'a + Compare<T>> {
    heap: &'a mut BinaryHeap<T, C>,
    /// Set to `true` by `DerefMut`; tells `drop` the root may have changed
    /// and must be sifted down.
    sift: bool,
}

impl<'a, T: fmt::Debug, C: Compare<T>> fmt::Debug for PeekMut<'a, T, C> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Index 0 is in bounds: `peek_mut` only hands out a guard for a
        // non-empty heap.
        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
    }
}

impl<'a, T, C: Compare<T>> Drop for PeekMut<'a, T, C> {
    fn drop(&mut self) {
        // Restore the heap invariant only if the root was possibly mutated
        // (i.e. `deref_mut` was called and `PeekMut::pop` did not run).
        if self.sift {
            self.heap.sift_down(0);
        }
    }
}

impl<'a, T, C: Compare<T>> Deref for PeekMut<'a, T, C> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.heap.data[0]
    }
}

impl<'a, T, C: Compare<T>> DerefMut for PeekMut<'a, T, C> {
    fn deref_mut(&mut self) -> &mut T {
        // The caller can now mutate the root, so schedule a sift-down.
        self.sift = true;
        &mut self.heap.data[0]
    }
}

impl<'a, T, C: Compare<T>> PeekMut<'a, T, C> {
    /// Removes the peeked value from the heap and returns it.
    pub fn pop(mut this: PeekMut<'a, T, C>) -> T {
        let value = this.heap.pop().unwrap();
        // `pop` already restored the invariant; make `drop` a no-op.
        this.sift = false;
        value
    }
}
105
106impl<T: Clone, C: Compare<T> + Clone> Clone for BinaryHeap<T, C> {
107    fn clone(&self) -> Self {
108        BinaryHeap {
109            data: self.data.clone(),
110            cmp: self.cmp.clone(),
111        }
112    }
113
114    fn clone_from(&mut self, source: &Self) {
115        self.data.clone_from(&source.data);
116    }
117}
118
119impl<T: Ord> Default for BinaryHeap<T> {
120    #[inline]
121    fn default() -> BinaryHeap<T> {
122        BinaryHeap::new()
123    }
124}
125
126impl<T: fmt::Debug, C: Compare<T>> fmt::Debug for BinaryHeap<T, C> {
127    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
128        f.debug_list().entries(self.iter()).finish()
129    }
130}
131
impl<T, C: Compare<T> + Default> BinaryHeap<T, C> {
    /// Builds a heap from `vec` using the comparator's `Default` value.
    pub fn from_vec(vec: Vec<T>) -> Self {
        BinaryHeap::from_vec_cmp(vec, C::default())
    }
}

impl<T, C: Compare<T>> BinaryHeap<T, C> {
    /// Builds a heap from `vec` with an explicit comparator, establishing
    /// the heap invariant via `rebuild`.
    pub fn from_vec_cmp(vec: Vec<T>, cmp: C) -> Self {
        unsafe { BinaryHeap::from_vec_cmp_raw(vec, cmp, true) }
    }

    /// Like [`BinaryHeap::from_vec_cmp`], but the rebuild may be skipped.
    ///
    /// # Safety
    ///
    /// If `rebuild` is `false`, `vec` must already satisfy the heap
    /// invariant under `cmp`; otherwise later operations yield elements in
    /// the wrong order. NOTE(review): as far as this file shows, violating
    /// this breaks ordering rather than memory safety — confirm.
    pub unsafe fn from_vec_cmp_raw(vec: Vec<T>, cmp: C, rebuild: bool) -> Self {
        let mut heap = BinaryHeap { data: vec, cmp };
        if rebuild && !heap.data.is_empty() {
            heap.rebuild();
        }
        heap
    }
}
150
151impl<T: Ord> BinaryHeap<T> {
152    pub fn new() -> Self {
153        BinaryHeap::from_vec(vec![])
154    }
155
156    pub fn with_capacity(capacity: usize) -> Self {
157        BinaryHeap::from_vec(Vec::with_capacity(capacity))
158    }
159}
160
161impl<T: Ord> BinaryHeap<T, MinComparator> {
162    pub fn new_min() -> Self {
163        BinaryHeap::from_vec(vec![])
164    }
165
166    pub fn with_capacity_min(capacity: usize) -> Self {
167        BinaryHeap::from_vec(Vec::with_capacity(capacity))
168    }
169}
170
171impl<T, F> BinaryHeap<T, FnComparator<F>>
172where
173    F: Fn(&T, &T) -> Ordering,
174{
175    pub fn new_by(f: F) -> Self {
176        BinaryHeap::from_vec_cmp(vec![], FnComparator(f))
177    }
178
179    pub fn with_capacity_by(capacity: usize, f: F) -> Self {
180        BinaryHeap::from_vec_cmp(Vec::with_capacity(capacity), FnComparator(f))
181    }
182}
183
184impl<T, F, K: Ord> BinaryHeap<T, KeyComparator<F>>
185where
186    F: Fn(&T) -> K,
187{
188    pub fn new_by_key(f: F) -> Self {
189        BinaryHeap::from_vec_cmp(vec![], KeyComparator(f))
190    }
191
192    pub fn with_capacity_by_key(capacity: usize, f: F) -> Self {
193        BinaryHeap::from_vec_cmp(Vec::with_capacity(capacity), KeyComparator(f))
194    }
195}
196
197impl<T, C: Compare<T>> BinaryHeap<T, C> {
    /// Replaces the comparator and rebuilds the heap under the new order.
    #[inline]
    pub fn replace_cmp(&mut self, cmp: C) {
        unsafe {
            // `rebuild == true` re-establishes the invariant, so this call
            // upholds `replace_cmp_raw`'s contract.
            self.replace_cmp_raw(cmp, true);
        }
    }

    /// Replaces the comparator, optionally skipping the rebuild.
    ///
    /// # Safety
    ///
    /// If `rebuild` is `false`, the stored data must already be heap-ordered
    /// under `cmp`; otherwise later operations yield elements out of order.
    pub unsafe fn replace_cmp_raw(&mut self, cmp: C, rebuild: bool) {
        self.cmp = cmp;
        if rebuild && !self.data.is_empty() {
            self.rebuild();
        }
    }
211
212    pub fn iter(&self) -> Iter<T> {
213        Iter {
214            iter: self.data.iter(),
215        }
216    }
217
218    pub fn into_iter_sorted(self) -> IntoIterSorted<T, C> {
219        IntoIterSorted { inner: self }
220    }
221
222    pub fn peek(&self) -> Option<&T> {
223        self.data.get(0)
224    }
225
226    pub fn peek_mut(&mut self) -> Option<PeekMut<T, C>> {
227        if self.is_empty() {
228            None
229        } else {
230            Some(PeekMut {
231                heap: self,
232                sift: false,
233            })
234        }
235    }
236
    /// Returns the number of elements the heap can hold without reallocating.
    pub fn capacity(&self) -> usize {
        self.data.capacity()
    }

    /// Reserves capacity for at least `additional` more elements.
    pub fn reserve(&mut self, additional: usize) {
        self.data.reserve(additional);
    }

    /// Discards excess capacity of the backing vector.
    pub fn shrink_to_fit(&mut self) {
        self.data.shrink_to_fit();
    }
248
249    pub fn pop(&mut self) -> Option<T> {
250        self.data.pop().map(|mut item| {
251            if !self.is_empty() {
252                swap(&mut item, &mut self.data[0]);
253                self.sift_down_to_bottom(0);
254            }
255            item
256        })
257    }
258
259    pub fn push(&mut self, item: T) {
260        let old_len = self.len();
261        self.data.push(item);
262        self.sift_up(0, old_len);
263    }
264
    /// Consumes the heap and returns the underlying vector in arbitrary
    /// (storage) order.
    pub fn into_vec(self) -> Vec<T> {
        self.into()
    }

    /// Consumes the heap and returns a vector sorted ascending under the
    /// heap's comparator (in-place heapsort, O(n log n)).
    pub fn into_sorted_vec(mut self) -> Vec<T> {
        let mut end = self.len();
        while end > 1 {
            end -= 1;
            // Swap the current maximum (root) into slot `end`, then repair
            // the heap over the remaining prefix data[..end].
            unsafe {
                // SAFETY: 1 <= end < len, so both pointers are in bounds.
                let ptr = self.data.as_mut_ptr();
                ptr::swap(ptr, ptr.add(end));
            }
            self.sift_down_range(0, end);
        }
        self.into_vec()
    }
281
    // The implementations of sift_up and sift_down use unsafe blocks in
    // order to move an element out of the vector (leaving behind a
    // hole), shift along the others and move the removed element back into the
    // vector at the final location of the hole.
    // The `Hole` type is used to represent this, and make sure
    // the hole is filled back at the end of its scope, even on panic.
    // Using a hole reduces the constant factor compared to using swaps,
    // which involves twice as many moves.

    /// Moves the element at `pos` up toward the root (never above `start`)
    /// until its parent is not less than it; returns its final index.
    ///
    /// NOTE(review): `pos` must be in bounds (`pos < self.len()`); this is
    /// only checked by a debug assertion inside `Hole::new`.
    pub fn sift_up(&mut self, start: usize, pos: usize) -> usize {
        unsafe {
            // Take out the value at `pos` and create a hole.
            let mut hole = Hole::new(&mut self.data, pos);

            while hole.pos() > start {
                let parent = (hole.pos() - 1) / 2;
                // Stop once the element is not greater than its parent,
                // i.e. hole.element() <= hole.get(parent).
                if self.cmp.compare(hole.element(), hole.get(parent)) != Ordering::Greater {
                    break;
                }
                hole.move_to(parent);
            }
            // `Hole::drop` writes the element into its final slot.
            hole.pos()
        }
    }
306
307    /// Take an element at `pos` and move it down the heap,
308    /// while its children are larger.
309    pub fn sift_down_range(&mut self, pos: usize, end: usize) {
310        unsafe {
311            let mut hole = Hole::new(&mut self.data, pos);
312            let mut child = 2 * pos + 1;
313            while child < end - 1 {
314                // compare with the greater of the two children
315                // if !(hole.get(child) > hole.get(child + 1)) { child += 1 }
316                child += (self.cmp.compare(hole.get(child), hole.get(child + 1))
317                    != Ordering::Greater) as usize;
318                // if we are already in order, stop.
319                // if hole.element() >= hole.get(child) {
320                if self.cmp.compare(hole.element(), hole.get(child)) != Ordering::Less {
321                    return;
322                }
323                hole.move_to(child);
324                child = 2 * hole.pos() + 1;
325            }
326            if child == end - 1
327                && self.cmp.compare(hole.element(), hole.get(child)) == Ordering::Less
328            {
329                hole.move_to(child);
330            }
331        }
332    }
333
334    pub fn sift_down(&mut self, pos: usize) {
335        let len = self.len();
336        self.sift_down_range(pos, len);
337    }
338
339    /// Take an element at `pos` and move it all the way down the heap,
340    /// then sift it up to its position.
341    ///
342    /// Note: This is faster when the element is known to be large / should
343    /// be closer to the bottom.
344    pub fn sift_down_to_bottom(&mut self, mut pos: usize) {
345        let end = self.len();
346        let start = pos;
347        unsafe {
348            let mut hole = Hole::new(&mut self.data, pos);
349            let mut child = 2 * pos + 1;
350            while child < end - 1 {
351                let right = child + 1;
352                // compare with the greater of the two children
353                // if !(hole.get(child) > hole.get(right)) { child += 1 }
354                child += (self.cmp.compare(hole.get(child), hole.get(right)) != Ordering::Greater)
355                    as usize;
356                hole.move_to(child);
357                child = 2 * hole.pos() + 1;
358            }
359            if child == end - 1 {
360                hole.move_to(child);
361            }
362            pos = hole.pos;
363        }
364        self.sift_up(start, pos);
365    }
366
367    pub fn len(&self) -> usize {
368        self.data.len()
369    }
370
371    pub fn is_empty(&self) -> bool {
372        self.len() == 0
373    }
374
375    #[inline]
376    pub fn drain(&mut self) -> Drain<T> {
377        Drain {
378            iter: self.data.drain(..),
379        }
380    }
381
382    pub fn clear(&mut self) {
383        self.drain();
384    }
385
386    fn rebuild(&mut self) {
387        let mut n = self.len() / 2;
388        while n > 0 {
389            n -= 1;
390            self.sift_down(n);
391        }
392    }
393
    /// Moves all elements of `other` into `self`, leaving `other` empty.
    ///
    /// Always drains into the larger heap (the two heaps are swapped first
    /// if needed), then chooses the asymptotically cheaper of
    /// "concatenate + rebuild" and "push each element".
    ///
    /// NOTE(review): when the swap happens, the comparators are exchanged
    /// too. With equal comparator values (the common case) this is
    /// unobservable — confirm the intent for stateful comparators.
    pub fn append(&mut self, other: &mut Self) {
        if self.len() < other.len() {
            swap(self, other);
        }

        if other.is_empty() {
            return;
        }

        // floor(log2(x)) for x > 0, via the position of the highest set bit.
        #[inline(always)]
        fn log2_fast(x: usize) -> usize {
            8 * size_of::<usize>() - (x.leading_zeros() as usize) - 1
        }

        // `rebuild` takes O(len1 + len2) operations
        // and about 2 * (len1 + len2) comparisons in the worst case
        // while `extend` takes O(len2 * log_2(len1)) operations
        // and about 1 * len2 * log_2(len1) comparisons in the worst case,
        // assuming len1 >= len2.
        #[inline]
        fn better_to_rebuild(len1: usize, len2: usize) -> bool {
            2 * (len1 + len2) < len2 * log2_fast(len1)
        }

        if better_to_rebuild(self.len(), other.len()) {
            self.data.append(&mut other.data);
            self.rebuild();
        } else {
            self.extend(other.drain());
        }
    }
425}
426
/// Hole represents a hole in a slice i.e. an index without valid value
/// (because it was moved from or duplicated).
/// In drop, `Hole` will restore the slice by filling the hole
/// position with the value that was originally removed.
struct Hole<'a, T: 'a> {
    data: &'a mut [T],
    /// `elt` is always `Some` from new until drop.
    elt: Option<T>,
    /// Index of the hole; `data[pos]` is logically empty until drop refills it.
    pos: usize,
}

impl<'a, T> Hole<'a, T> {
    /// Create a new Hole at index `pos`.
    ///
    /// Unsafe because pos must be within the data slice.
    #[inline]
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // Duplicate the element out of the slice. `data[pos]` must not be
        // read again until `drop` writes a value back into it.
        let elt = ptr::read(&data[pos]);
        Hole {
            data,
            elt: Some(elt),
            pos,
        }
    }

    /// Returns the current position of the hole.
    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }

    /// Returns a reference to the element removed.
    #[inline]
    fn element(&self) -> &T {
        // `elt` is `Some` for the whole lifetime of the hole (see field doc).
        self.elt.as_ref().unwrap()
    }

    /// Returns a reference to the element at `index`.
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        self.data.get_unchecked(index)
    }

    /// Move hole to new location
    ///
    /// Unsafe because index must be within the data slice and not equal to pos.
    #[inline]
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        // Copy the element at `index` into the hole slot; the hole then
        // lives at `index`. Pointers are distinct by the assertions above.
        let index_ptr: *const _ = self.data.get_unchecked(index);
        let hole_ptr = self.data.get_unchecked_mut(self.pos);
        ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        self.pos = index;
    }
}

impl<'a, T> Drop for Hole<'a, T> {
    #[inline]
    fn drop(&mut self) {
        // fill the hole again
        unsafe {
            // SAFETY: `pos` is kept in bounds by `new`/`move_to`, and the
            // slot at `pos` is the single logically-empty one, so writing
            // the stashed element restores every index to a valid value.
            let pos = self.pos;
            ptr::write(self.data.get_unchecked_mut(pos), self.elt.take().unwrap());
        }
    }
}
498
/// A borrowed iterator over a `BinaryHeap`'s elements in arbitrary
/// (storage) order, created by [`BinaryHeap::iter`].
pub struct Iter<'a, T: 'a> {
    iter: slice::Iter<'a, T>,
}

impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let remaining = self.iter.as_slice();
        f.debug_tuple("Iter").field(&remaining).finish()
    }
}

// Manual impl so cloning the iterator does not require `T: Clone`
// (a derive would add that bound; see rust-lang/rust#26925).
impl<'a, T> Clone for Iter<'a, T> {
    fn clone(&self) -> Self {
        Iter {
            iter: self.iter.clone(),
        }
    }
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}
542
543// #[stable(feature = "rust1", since = "1.0.0")]
544// impl<'a, T> ExactSizeIterator for Iter<'a, T> {
545//     fn is_empty(&self) -> bool {
546//         self.iter.is_empty()
547//     }
548// }
549
550// #[stable(feature = "fused", since = "1.26.0")]
551// impl<'a, T> FusedIterator for Iter<'a, T> {}
552
/// An owning iterator over the elements of a `BinaryHeap`.
///
/// This `struct` is created by the [`into_iter`] method on [`BinaryHeap`][`BinaryHeap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
/// Yields elements in arbitrary (storage) order, not heap order.
///
/// [`into_iter`]: struct.BinaryHeap.html#method.into_iter
/// [`BinaryHeap`]: struct.BinaryHeap.html
// #[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone)]
pub struct IntoIter<T> {
    iter: vec::IntoIter<T>,
}
565
566// #[stable(feature = "collection_debug", since = "1.17.0")]
567impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
568    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
569        f.debug_tuple("IntoIter")
570            .field(&self.iter.as_slice())
571            .finish()
572    }
573}
574
575// #[stable(feature = "rust1", since = "1.0.0")]
576impl<T> Iterator for IntoIter<T> {
577    type Item = T;
578
579    #[inline]
580    fn next(&mut self) -> Option<T> {
581        self.iter.next()
582    }
583
584    #[inline]
585    fn size_hint(&self) -> (usize, Option<usize>) {
586        self.iter.size_hint()
587    }
588}
589
590// #[stable(feature = "rust1", since = "1.0.0")]
591impl<T> DoubleEndedIterator for IntoIter<T> {
592    #[inline]
593    fn next_back(&mut self) -> Option<T> {
594        self.iter.next_back()
595    }
596}
597
598// #[stable(feature = "rust1", since = "1.0.0")]
599// impl<T> ExactSizeIterator for IntoIter<T> {
600//     fn is_empty(&self) -> bool {
601//         self.iter.is_empty()
602//     }
603// }
604
605// #[stable(feature = "fused", since = "1.26.0")]
606// impl<T> FusedIterator for IntoIter<T> {}
607
608// #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
609#[derive(Clone, Debug)]
610pub struct IntoIterSorted<T, C: Compare<T>> {
611    inner: BinaryHeap<T, C>,
612}
613
614// #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
615impl<T, C: Compare<T>> Iterator for IntoIterSorted<T, C> {
616    type Item = T;
617
618    #[inline]
619    fn next(&mut self) -> Option<T> {
620        self.inner.pop()
621    }
622
623    #[inline]
624    fn size_hint(&self) -> (usize, Option<usize>) {
625        let exact = self.inner.len();
626        (exact, Some(exact))
627    }
628}
629
/// A draining iterator over the elements of a `BinaryHeap`, created by
/// [`BinaryHeap::drain`].
///
/// Yields elements in arbitrary (storage) order while removing them.
pub struct Drain<'a, T: 'a> {
    iter: vec::Drain<'a, T>,
}

impl<'a, T: 'a> Iterator for Drain<'a, T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}
665
666// #[stable(feature = "drain", since = "1.6.0")]
667// impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {
668//     fn is_empty(&self) -> bool {
669//         self.iter.is_empty()
670//     }
671// }
672
673// #[stable(feature = "fused", since = "1.26.0")]
674// impl<'a, T: 'a> FusedIterator for Drain<'a, T> {}
675
676// #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
677impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
678    /// creates a max heap from a vec
679    fn from(vec: Vec<T>) -> Self {
680        BinaryHeap::from_vec(vec)
681    }
682}
683
684// #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
685// impl<T, C: Compare<T>> From<BinaryHeap<T, C>> for Vec<T> {
686//     fn from(heap: BinaryHeap<T, C>) -> Vec<T> {
687//         heap.data
688//     }
689// }
690
// `From<BinaryHeap<T, C>> for Vec<T>` (the commented-out impl above) is
// rejected by the orphan rules (E0210): the uncovered type parameter `T`
// appears in the foreign type `Vec<T>` before the first local type.
// Implementing `Into` directly is the standard workaround, even though
// `From` is normally preferred (clippy::from_over_into).
impl<T, C: Compare<T>> Into<Vec<T>> for BinaryHeap<T, C> {
    /// Returns the underlying vector in arbitrary (storage) order.
    fn into(self) -> Vec<T> {
        self.data
    }
}
696
697// #[stable(feature = "rust1", since = "1.0.0")]
698impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
699    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
700        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
701    }
702}
703
704// #[stable(feature = "rust1", since = "1.0.0")]
705impl<T, C: Compare<T>> IntoIterator for BinaryHeap<T, C> {
706    type Item = T;
707    type IntoIter = IntoIter<T>;
708
709    fn into_iter(self) -> IntoIter<T> {
710        IntoIter {
711            iter: self.data.into_iter(),
712        }
713    }
714}
715
716// #[stable(feature = "rust1", since = "1.0.0")]
717impl<'a, T, C: Compare<T>> IntoIterator for &'a BinaryHeap<T, C> {
718    type Item = &'a T;
719    type IntoIter = Iter<'a, T>;
720
721    fn into_iter(self) -> Iter<'a, T> {
722        self.iter()
723    }
724}
725
726// #[stable(feature = "rust1", since = "1.0.0")]
727impl<T, C: Compare<T>> Extend<T> for BinaryHeap<T, C> {
728    #[inline]
729    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
730        // <Self as SpecExtend<I>>::spec_extend(self, iter);
731        self.extend_desugared(iter);
732    }
733}
734
735impl<T, C: Compare<T>> BinaryHeap<T, C> {
736    fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
737        let iterator = iter.into_iter();
738        let (lower, _) = iterator.size_hint();
739
740        self.reserve(lower);
741
742        for elem in iterator {
743            self.push(elem);
744        }
745    }
746}
747
748// #[stable(feature = "extend_ref", since = "1.2.0")]
749impl<'a, T: 'a + Copy, C: Compare<T>> Extend<&'a T> for BinaryHeap<T, C> {
750    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
751        self.extend(iter.into_iter().cloned());
752    }
753}