#![allow(missing_docs)]
use core::fmt;
#[cfg(feature = "trusted_len")]
use core::iter::TrustedLen;
use core::iter::{FromIterator, FusedIterator};
#[cfg(feature = "inplace_iteration")]
use core::iter::{InPlaceIterable, SourceIter};
use core::mem::{self, swap, ManuallyDrop};
use core::num::NonZeroUsize;
use core::ops::{Deref, DerefMut};
use core::ptr;
use alloc::collections::TryReserveError;
use alloc::slice;
use alloc::vec::{self, Vec};
use cfg_if::cfg_if;
use super::SpecExtend;
use crate::{
default::{OrdStoredKey, OrdTotalOrder},
SortableBy, TotalOrder,
};
#[cfg(test)]
mod tests;
/// A priority queue implemented with a binary heap, parameterized over a
/// [`TotalOrder`] `O`.  It behaves as a max-heap with respect to that order:
/// `pop` removes the greatest element.
pub struct BinaryHeap<T, O = OrdTotalOrder<<T as OrdStoredKey>::OrdKeyType>> {
    // Backing storage in implicit binary-tree layout: the children of the
    // element at index `i` live at indices `2 * i + 1` and `2 * i + 2`.
    data: Vec<T>,
    // The total order used to compare elements.
    order: O,
}
/// Structure wrapping a mutable reference to the greatest element of a
/// [`BinaryHeap`]; created by [`BinaryHeap::peek_mut`].
///
/// On drop, the heap's length is restored (if it was truncated by
/// `deref_mut`) and the possibly-modified root is sifted back down so the
/// heap invariant holds again.
pub struct PeekMut<
    'a,
    T: 'a + SortableBy<O>,
    O: TotalOrder = OrdTotalOrder<<T as OrdStoredKey>::OrdKeyType>,
> {
    heap: &'a mut BinaryHeap<T, O>,
    // `Some(len)` once the root has been mutably borrowed via `deref_mut`.
    // While set, the backing vector's length is temporarily truncated to 1
    // (see the `DerefMut` impl) and must be restored to this value.
    original_len: Option<NonZeroUsize>,
}
impl<T: SortableBy<O> + fmt::Debug, O: TotalOrder> fmt::Debug for PeekMut<'_, T, O> {
    /// Formats as a one-field tuple struct showing the heap's root element.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let root = &self.heap.data[0];
        f.debug_tuple("PeekMut").field(root).finish()
    }
}
impl<T: SortableBy<O>, O: TotalOrder> Drop for PeekMut<'_, T, O> {
    fn drop(&mut self) {
        if let Some(original_len) = self.original_len {
            // SAFETY: `original_len` was the vector's length before
            // `deref_mut` truncated it to 1; all of those elements are
            // still initialized and within capacity.
            unsafe { self.heap.data.set_len(original_len.get()) };
            // SAFETY: the heap is non-empty (`original_len >= 1`), so index
            // 0 is valid.  Sifting the possibly-mutated root down restores
            // the heap invariant.
            unsafe { self.heap.sift_down(0) };
        }
    }
}
impl<T: SortableBy<O>, O: TotalOrder> Deref for PeekMut<'_, T, O> {
    type Target = T;
    fn deref(&self) -> &T {
        debug_assert!(!self.heap.is_empty());
        // SAFETY: a `PeekMut` is only created for a non-empty heap
        // (see `peek_mut`), so index 0 is in bounds.
        unsafe { self.heap.data.get_unchecked(0) }
    }
}
impl<T: SortableBy<O>, O: TotalOrder> DerefMut for PeekMut<'_, T, O> {
    fn deref_mut(&mut self) -> &mut T {
        debug_assert!(!self.heap.is_empty());
        let len = self.heap.len();
        if len > 1 {
            // Temporarily truncate the vector to just the root while the
            // caller holds the mutable borrow: if this `PeekMut` is leaked
            // (drop never runs), the heap is left in a valid single-element
            // state — leaking the tail elements — rather than with a
            // possibly-violated heap invariant.  `Drop` restores the length.
            unsafe {
                // SAFETY: `len > 1 > 0`, so the `NonZeroUsize` is valid.
                self.original_len = Some(NonZeroUsize::new_unchecked(len));
                // SAFETY: shrinking the recorded length never exposes
                // uninitialized memory; elements 1..len stay initialized.
                self.heap.data.set_len(1);
            }
        }
        // SAFETY: the heap is non-empty, so index 0 is in bounds.
        unsafe { self.heap.data.get_unchecked_mut(0) }
    }
}
impl<'a, T: SortableBy<O>, O: TotalOrder> PeekMut<'a, T, O> {
    /// Removes the peeked element from the heap and returns it.
    pub fn pop(mut this: PeekMut<'a, T, O>) -> T {
        if let Some(original_len) = this.original_len.take() {
            // SAFETY: restores the length that `deref_mut` truncated; all
            // `original_len` elements are initialized and within capacity.
            // `take()` also disarms the sift-down in `Drop`: `pop` below
            // re-establishes the invariant itself.
            unsafe { this.heap.data.set_len(original_len.get()) };
        }
        // The heap is non-empty while a `PeekMut` exists, so `unwrap`
        // cannot fail.
        this.heap.pop().unwrap()
    }
}
impl<T: Clone, O: Clone> Clone for BinaryHeap<T, O> {
    /// Clones both the elements and the order value.
    fn clone(&self) -> Self {
        BinaryHeap { data: self.data.clone(), order: self.order.clone() }
    }

    /// Clones `source` into `self`, reusing `self`'s allocation where
    /// possible.
    ///
    /// BUG FIX: previously only `data` was copied, leaving `self.order`
    /// unchanged.  When the two heaps carry different order values the
    /// copied elements are arranged per `source.order`, so keeping the old
    /// order would silently break the heap invariant and make `clone_from`
    /// disagree with `clone`.  The order is now cloned as well.
    fn clone_from(&mut self, source: &Self) {
        self.data.clone_from(&source.data);
        self.order.clone_from(&source.order);
    }
}
impl<T: SortableBy<O>, O: TotalOrder + Default> Default for BinaryHeap<T, O> {
    /// Creates an empty heap using the order type's default value.
    #[inline]
    fn default() -> BinaryHeap<T, O> {
        Self::new(O::default())
    }
}
impl<T: fmt::Debug, O> fmt::Debug for BinaryHeap<T, O> {
    /// Formats the heap as a list of its elements in storage order.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut list = f.debug_list();
        list.entries(self.iter());
        list.finish()
    }
}
impl<T: SortableBy<O>, O: TotalOrder> BinaryHeap<T, O> {
    /// Creates an empty heap that compares elements via `order`.
    #[must_use]
    pub fn new(order: O) -> BinaryHeap<T, O> {
        BinaryHeap { data: vec![], order }
    }
    /// Creates an empty heap able to hold at least `capacity` elements
    /// without reallocating.
    #[must_use]
    pub fn with_capacity(order: O, capacity: usize) -> BinaryHeap<T, O> {
        BinaryHeap { data: Vec::with_capacity(capacity), order }
    }
    /// Returns a mutable view of the greatest element, or `None` if the
    /// heap is empty.  Dropping the `PeekMut` restores the heap invariant.
    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, O>> {
        if self.is_empty() { None } else { Some(PeekMut { heap: self, original_len: None }) }
    }
    /// Removes and returns the greatest element, or `None` if empty.
    pub fn pop(&mut self) -> Option<T> {
        self.data.pop().map(|mut item| {
            if !self.is_empty() {
                // Swap the old root out and the popped tail element in,
                // then restore the invariant by sifting the new root down.
                swap(&mut item, &mut self.data[0]);
                // SAFETY: the heap is non-empty, so index 0 is in bounds.
                unsafe { self.sift_down_to_bottom(0) };
            }
            item
        })
    }
    /// Pushes `item` onto the heap.
    pub fn push(&mut self, item: T) {
        let old_len = self.len();
        self.data.push(item);
        // SAFETY: `old_len` is the index of the element just pushed, which
        // is in bounds; sift it up to its proper position.
        unsafe { self.sift_up(0, old_len) };
    }
    /// Consumes the heap and returns a vector sorted ascending with
    /// respect to the order (classic in-place heapsort: repeatedly swap
    /// the current maximum to the shrinking tail).
    #[must_use = "`self` will be dropped if the result is not used"]
    pub fn into_sorted_vec(mut self) -> Vec<T> {
        let mut end = self.len();
        while end > 1 {
            end -= 1;
            // SAFETY: `0 < end < original len`, so both pointers are in
            // bounds and distinct.
            unsafe {
                let ptr = self.data.as_mut_ptr();
                ptr::swap(ptr, ptr.add(end));
            }
            // SAFETY: the heap portion is now `data[..end]` with `end > 0`,
            // so position 0 is a valid hole within the range.
            unsafe { self.sift_down_range(0, end) };
        }
        self.into_vec()
    }
    /// Moves the element at `pos` up toward the root (never above `start`)
    /// until its parent is no smaller; returns the final index.
    ///
    /// # Safety
    /// `pos` must be a valid index into `self.data`.
    unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
        // `Hole` lifts the element out so each step is a single copy, and
        // its `Drop` writes the element back even if a comparison panics.
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        while hole.pos() > start {
            let parent = (hole.pos() - 1) / 2;
            if self.order.le(hole.element(), unsafe { hole.get(parent) }) {
                break;
            }
            unsafe { hole.move_to(parent) };
        }
        hole.pos()
    }
    /// Moves the element at `pos` down within `data[..end]` until neither
    /// child is greater.
    ///
    /// # Safety
    /// `pos < end <= self.len()` must hold.
    unsafe fn sift_down_range(&mut self, pos: usize, end: usize) {
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;
        // Loop while BOTH children of the hole are inside the range
        // (`child + 1 <= end - 1`, written saturating to handle small `end`).
        while child <= end.saturating_sub(2) {
            // Select the greater child; on a tie the right child wins.
            child += unsafe { self.order.le(hole.get(child), hole.get(child + 1)) } as usize;
            if self.order.ge(hole.element(), unsafe { hole.get(child) }) {
                return;
            }
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }
        // At most the left child may remain in range; handle it here.
        if child == end - 1 && self.order.lt(hole.element(), unsafe { hole.get(child) }) {
            unsafe { hole.move_to(child) };
        }
    }
    /// Sifts the element at `pos` down through the whole heap.
    ///
    /// # Safety
    /// `pos` must be a valid index into `self.data`.
    unsafe fn sift_down(&mut self, pos: usize) {
        let len = self.len();
        unsafe { self.sift_down_range(pos, len) };
    }
    /// Takes the element at `pos` straight down to a leaf without
    /// comparisons, then sifts it back up.  Used by `pop`, where the moved
    /// element came from the tail and is therefore expected to end up near
    /// the bottom — this does fewer comparisons than a plain sift-down.
    ///
    /// # Safety
    /// `pos` must be a valid index into `self.data`.
    unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
        let end = self.len();
        let start = pos;
        let mut hole = unsafe { Hole::new(&mut self.data, pos) };
        let mut child = 2 * hole.pos() + 1;
        // Always descend into the greater child until at most one child
        // remains in range.
        while child <= end.saturating_sub(2) {
            child += unsafe { self.order.le(hole.get(child), hole.get(child + 1)) } as usize;
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }
        if child == end - 1 {
            unsafe { hole.move_to(child) };
        }
        pos = hole.pos();
        // Drop the hole first so `sift_up` can create its own.
        drop(hole);
        unsafe { self.sift_up(start, pos) };
    }
    /// Restores the heap invariant assuming only `data[start..]` may
    /// violate it (`data[..start]` is already a valid heap).
    fn rebuild_tail(&mut self, start: usize) {
        if start == self.len() {
            return;
        }
        let tail_len = self.len() - start;
        // Floor of log2; requires `x >= 1` (holds below since the `else`
        // branches run only when `start >= tail_len >= 1`).
        #[inline(always)]
        fn log2_fast(x: usize) -> usize {
            (usize::BITS - x.leading_zeros() - 1) as usize
        }
        // Cost heuristic: full rebuild is ~2*len comparisons; sifting up
        // each tail element is ~tail_len * log2(start).  The constant 11
        // approximates log2 for large heaps.
        let better_to_rebuild = if start < tail_len {
            true
        } else if self.len() <= 2048 {
            2 * self.len() < tail_len * log2_fast(start)
        } else {
            2 * self.len() < tail_len * 11
        };
        if better_to_rebuild {
            self.rebuild();
        } else {
            for i in start..self.len() {
                // SAFETY: `i < self.len()`, so it is a valid index.
                unsafe { self.sift_up(0, i) };
            }
        }
    }
    /// Rebuilds the heap invariant over all of `data` (bottom-up heapify:
    /// sift down every non-leaf node, last first).
    fn rebuild(&mut self) {
        let mut n = self.len() / 2;
        while n > 0 {
            n -= 1;
            // SAFETY: `n < len / 2 + 1 <= len`, so the index is valid.
            unsafe { self.sift_down(n) };
        }
    }
    /// Moves all elements from `other` into `self`, leaving `other` empty.
    pub fn append(&mut self, other: &mut Self) {
        // Append the smaller heap onto the larger one to minimize the
        // amount of rebuilding.
        if self.len() < other.len() {
            swap(self, other);
        }
        let start = self.data.len();
        self.data.append(&mut other.data);
        self.rebuild_tail(start);
    }
    /// Returns an iterator that removes elements in heap order (greatest
    /// first).  Dropping the iterator removes any elements not yet yielded.
    #[inline]
    #[cfg(feature = "binary_heap_drain_sorted")]
    pub fn drain_sorted(&mut self) -> DrainSorted<'_, T, O> {
        DrainSorted { inner: self }
    }
    /// Retains only the elements for which `f` returns `true`, then
    /// repairs the heap invariant.
    #[cfg(feature = "binary_heap_retain")]
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        // Track the first index at which an element was removed: the
        // prefix before it is untouched and still a valid heap, so only
        // the tail needs rebuilding.
        let mut first_removed = self.len();
        let mut i = 0;
        self.data.retain(|e| {
            let keep = f(e);
            if !keep && i < first_removed {
                first_removed = i;
            }
            i += 1;
            keep
        });
        self.rebuild_tail(first_removed);
    }
}
impl<T, O> BinaryHeap<T, O> {
    /// Returns an iterator over the elements in arbitrary (storage) order.
    pub fn iter(&self) -> Iter<'_, T> {
        Iter { iter: self.data.iter() }
    }
    /// Returns a consuming iterator yielding elements in heap order
    /// (greatest first, by repeatedly popping).
    #[cfg(feature = "binary_heap_into_iter_sorted")]
    pub fn into_iter_sorted(self) -> IntoIterSorted<T, O> {
        IntoIterSorted { inner: self }
    }
    /// Returns a reference to the greatest element (the root), or `None`
    /// if the heap is empty.
    #[must_use]
    pub fn peek(&self) -> Option<&T> {
        self.data.get(0)
    }
    /// Returns the number of elements the heap can hold without
    /// reallocating.
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.data.capacity()
    }
    /// Reserves capacity for exactly `additional` more elements.
    pub fn reserve_exact(&mut self, additional: usize) {
        self.data.reserve_exact(additional);
    }
    /// Reserves capacity for at least `additional` more elements.
    pub fn reserve(&mut self, additional: usize) {
        self.data.reserve(additional);
    }
    /// Fallible version of [`Self::reserve_exact`].
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve_exact(additional)
    }
    /// Fallible version of [`Self::reserve`].
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        self.data.try_reserve(additional)
    }
    /// Discards excess capacity.
    pub fn shrink_to_fit(&mut self) {
        self.data.shrink_to_fit();
    }
    /// Discards capacity above `min_capacity`.
    #[inline]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        self.data.shrink_to(min_capacity)
    }
    /// Returns the underlying elements, in arbitrary (storage) order.
    #[must_use]
    #[cfg(feature = "binary_heap_as_slice")]
    pub fn as_slice(&self) -> &[T] {
        self.data.as_slice()
    }
    /// Consumes the heap, returning the backing `Vec` in arbitrary order.
    #[must_use = "`self` will be dropped if the result is not used"]
    pub fn into_vec(self) -> Vec<T> {
        self.into()
    }
    /// Returns the number of elements in the heap.
    #[must_use]
    pub fn len(&self) -> usize {
        self.data.len()
    }
    /// Returns `true` if the heap contains no elements.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Removes all elements, yielding them in arbitrary order.
    #[inline]
    pub fn drain(&mut self) -> Drain<'_, T> {
        Drain { iter: self.data.drain(..) }
    }
    /// Removes all elements (dropping the `Drain` iterator drains
    /// everything).
    pub fn clear(&mut self) {
        self.drain();
    }
}
/// A "hole" in a slice: an index whose value has been moved out (via
/// `ptr::read`) and is held separately.  On drop the held element is
/// written back into the hole, so the slice is restored to a fully
/// initialized state even if a comparison panics mid-sift.
struct Hole<'a, T: 'a> {
    data: &'a mut [T],
    // The element lifted out of `data[pos]`; `ManuallyDrop` because the
    // hole logically owns it and writes it back in `Drop`.
    elt: ManuallyDrop<T>,
    // Current index of the hole.  Invariant: `pos < data.len()`, and
    // `data[pos]` must not be read while the hole exists.
    pos: usize,
}
impl<'a, T> Hole<'a, T> {
    /// Creates a new hole at `pos`, lifting the element out of the slice.
    ///
    /// # Safety
    /// `pos` must be within bounds of `data`, and `data[pos]` must not be
    /// used (it is logically uninitialized) until the hole is dropped.
    #[inline]
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // SAFETY: caller guarantees `pos` is in bounds; the duplicate left
        // in the slice is never read while the hole exists.
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        Hole { data, elt: ManuallyDrop::new(elt), pos }
    }
    /// Current index of the hole.
    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }
    /// Returns a reference to the element that was removed from the slice.
    #[inline]
    fn element(&self) -> &T {
        &self.elt
    }
    /// Returns a reference to the element at `index`.
    ///
    /// # Safety
    /// `index` must be in bounds and must not equal the hole's position
    /// (that slot holds a logically moved-out value).
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe { self.data.get_unchecked(index) }
    }
    /// Moves the hole to `index`: copies `data[index]` into the current
    /// hole slot and makes `index` the new hole.
    ///
    /// # Safety
    /// `index` must be in bounds and different from the current position.
    #[inline]
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        // SAFETY: both indices are in bounds and distinct, so the one-
        // element copy regions cannot overlap.
        unsafe {
            let ptr = self.data.as_mut_ptr();
            let index_ptr: *const _ = ptr.add(index);
            let hole_ptr = ptr.add(self.pos);
            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        }
        self.pos = index;
    }
}
impl<T> Drop for Hole<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // Fill the hole again with the element that was lifted out.
        // SAFETY: `pos < data.len()` by invariant; `elt` is a valid value
        // whose ownership transfers back into the slice (it is wrapped in
        // `ManuallyDrop`, so it is not dropped here).
        unsafe {
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
    }
}
/// A borrowing iterator over the elements of a `BinaryHeap`, yielded in
/// arbitrary (storage) order; created by [`BinaryHeap::iter`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub struct Iter<'a, T: 'a> {
    iter: slice::Iter<'a, T>,
}
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    /// Formats as a tuple struct wrapping the not-yet-yielded elements.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let remaining = self.iter.as_slice();
        f.debug_tuple("Iter").field(&remaining).finish()
    }
}
impl<T> Clone for Iter<'_, T> {
fn clone(&self) -> Self {
Iter { iter: self.iter.clone() }
}
}
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    // All methods simply forward to the underlying slice iterator.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    #[inline]
    fn last(self) -> Option<Self::Item> {
        self.iter.last()
    }
}
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    /// Forwards to the underlying slice iterator.
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}
// Exactness and fusedness are inherited from `slice::Iter`.
impl<T> ExactSizeIterator for Iter<'_, T> {
    #[cfg(feature = "exact_size_is_empty")]
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}
impl<T> FusedIterator for Iter<'_, T> {}
cfg_if! {
    if #[cfg(not(feature = "inplace_iteration"))] {
        /// An owning iterator over the elements of a `BinaryHeap`,
        /// yielded in arbitrary (storage) order; a thin wrapper around
        /// `vec::IntoIter`.
        #[derive(Clone)]
        pub struct IntoIter<T> {
            iter: vec::IntoIter<T>,
        }
        impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
            }
        }
        impl<T> Iterator for IntoIter<T> {
            type Item = T;
            #[inline]
            fn next(&mut self) -> Option<T> {
                self.iter.next()
            }
            #[inline]
            fn size_hint(&self) -> (usize, Option<usize>) {
                self.iter.size_hint()
            }
        }
        impl<T> DoubleEndedIterator for IntoIter<T> {
            #[inline]
            fn next_back(&mut self) -> Option<T> {
                self.iter.next_back()
            }
        }
        impl<T> ExactSizeIterator for IntoIter<T> {
            #[cfg(feature = "exact_size_is_empty")]
            fn is_empty(&self) -> bool {
                self.iter.is_empty()
            }
        }
        impl<T> FusedIterator for IntoIter<T> {}
        // NOTE(review): the two impls below are gated on
        // `feature = "inplace_iteration"`, but this entire `cfg_if` branch
        // is compiled only when that feature is *disabled*, so they appear
        // to be unreachable dead code — confirm intent.
        #[cfg(feature = "inplace_iteration")]
        #[doc(hidden)]
        unsafe impl<T> SourceIter for IntoIter<T> {
            type Source = IntoIter<T>;
            #[inline]
            unsafe fn as_inner(&mut self) -> &mut Self::Source {
                self
            }
        }
        #[cfg(feature = "inplace_iteration")]
        #[doc(hidden)]
        unsafe impl<I> InPlaceIterable for IntoIter<I> {}
    } else {
        // With in-place iteration enabled, use `vec::IntoIter` directly so
        // its unstable iterator traits carry over.
        pub use vec::IntoIter;
    }
}
/// A consuming iterator that yields elements in heap order (greatest
/// first, by repeatedly popping); created by
/// [`BinaryHeap::into_iter_sorted`].
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[cfg(feature = "binary_heap_into_iter_sorted")]
#[derive(Clone, Debug)]
pub struct IntoIterSorted<T, O = OrdTotalOrder<<T as OrdStoredKey>::OrdKeyType>> {
    inner: BinaryHeap<T, O>,
}
#[cfg(feature = "binary_heap_into_iter_sorted")]
impl<T: SortableBy<O>, O: TotalOrder> Iterator for IntoIterSorted<T, O> {
    type Item = T;
    /// Pops the current greatest element from the owned heap.
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop()
    }
    /// Exact bound: every remaining heap element will be yielded.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = self.inner.len();
        (exact, Some(exact))
    }
}
// `size_hint` is exact and `pop` keeps returning `None` once empty, so
// exactness and fusedness hold.
#[cfg(feature = "binary_heap_into_iter_sorted")]
impl<T: SortableBy<O>, O: TotalOrder> ExactSizeIterator for IntoIterSorted<T, O> {}
#[cfg(feature = "binary_heap_into_iter_sorted")]
impl<T: SortableBy<O>, O: TotalOrder> FusedIterator for IntoIterSorted<T, O> {}
// SAFETY: `size_hint` reports the heap's exact remaining length.
#[cfg(all(feature = "binary_heap_into_iter_sorted", feature = "trusted_len"))]
unsafe impl<T: SortableBy<O>, O: TotalOrder> TrustedLen for IntoIterSorted<T, O> {}
/// A draining iterator over the elements of a `BinaryHeap`, yielded in
/// arbitrary (storage) order; created by [`BinaryHeap::drain`].
#[derive(Debug)]
pub struct Drain<'a, T: 'a> {
    iter: vec::Drain<'a, T>,
}
impl<T> Iterator for Drain<'_, T> {
    type Item = T;

    // Both methods simply forward to the underlying `vec::Drain`.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
impl<T> DoubleEndedIterator for Drain<'_, T> {
    /// Forwards to the underlying `vec::Drain`.
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}
// Exactness and fusedness are inherited from `vec::Drain`.
impl<T> ExactSizeIterator for Drain<'_, T> {
    #[cfg(feature = "exact_size_is_empty")]
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}
impl<T> FusedIterator for Drain<'_, T> {}
/// A draining iterator that yields elements in heap order (greatest first,
/// by repeatedly popping); created by [`BinaryHeap::drain_sorted`].
/// Dropping it removes any elements not yet yielded from the heap.
#[cfg(feature = "binary_heap_drain_sorted")]
#[derive(Debug)]
pub struct DrainSorted<
    'a,
    T: SortableBy<O>,
    O: TotalOrder = OrdTotalOrder<<T as OrdStoredKey>::OrdKeyType>,
> {
    inner: &'a mut BinaryHeap<T, O>,
}
#[cfg(feature = "binary_heap_drain_sorted")]
impl<'a, T: SortableBy<O>, O: TotalOrder> Drop for DrainSorted<'a, T, O> {
    /// Removes any elements the iterator did not yield.
    fn drop(&mut self) {
        // If dropping a yielded element panics, this guard's destructor
        // still pops the remaining elements so the heap ends up empty.
        struct DropGuard<'r, 'a, T: SortableBy<O>, O: TotalOrder>(&'r mut DrainSorted<'a, T, O>);
        impl<'r, 'a, T: SortableBy<O>, O: TotalOrder> Drop for DropGuard<'r, 'a, T, O> {
            fn drop(&mut self) {
                while self.0.inner.pop().is_some() {}
            }
        }
        while let Some(item) = self.inner.pop() {
            // Arm the guard around `drop(item)`: on panic the guard drains
            // the rest; on success `mem::forget` disarms it and we loop.
            let guard = DropGuard(self);
            drop(item);
            mem::forget(guard);
        }
    }
}
#[cfg(feature = "binary_heap_drain_sorted")]
impl<T: SortableBy<O>, O: TotalOrder> Iterator for DrainSorted<'_, T, O> {
    type Item = T;
    /// Pops the current greatest element from the borrowed heap.
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop()
    }
    /// Exact bound: every remaining heap element will be yielded.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = self.inner.len();
        (exact, Some(exact))
    }
}
// `size_hint` is exact and `pop` keeps returning `None` once empty, so
// exactness and fusedness hold.
#[cfg(feature = "binary_heap_drain_sorted")]
impl<T: SortableBy<O>, O: TotalOrder> ExactSizeIterator for DrainSorted<'_, T, O> {}
#[cfg(feature = "binary_heap_drain_sorted")]
impl<T: SortableBy<O>, O: TotalOrder> FusedIterator for DrainSorted<'_, T, O> {}
// SAFETY: `size_hint` reports the heap's exact remaining length.
#[cfg(all(feature = "binary_heap_drain_sorted", feature = "trusted_len"))]
unsafe impl<T: SortableBy<O>, O: TotalOrder> TrustedLen for DrainSorted<'_, T, O> {}
impl<T: SortableBy<O>, O: TotalOrder + Default> From<Vec<T>> for BinaryHeap<T, O> {
    /// Converts `vec` into a heap using the default order value,
    /// heapifying the elements in place via `rebuild`.
    fn from(vec: Vec<T>) -> BinaryHeap<T, O> {
        let mut heap = BinaryHeap { data: vec, order: O::default() };
        heap.rebuild();
        heap
    }
}
impl<T: SortableBy<O>, O: TotalOrder + Default, const N: usize> From<[T; N]> for BinaryHeap<T, O> {
    /// Builds a heap from the array's elements using the default order.
    fn from(arr: [T; N]) -> Self {
        Self::from_iter(arr)
    }
}
impl<T, O> From<BinaryHeap<T, O>> for Vec<T> {
    /// Returns the heap's backing vector in arbitrary (heap storage)
    /// order; this conversion moves the allocation and copies nothing.
    fn from(heap: BinaryHeap<T, O>) -> Vec<T> {
        heap.data
    }
}
impl<T: SortableBy<O>, O: TotalOrder + Default> FromIterator<T> for BinaryHeap<T, O> {
    /// Collects the iterator into a `Vec`, then heapifies it in one pass
    /// (via the `From<Vec<T>>` impl).
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T, O> {
        BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
    }
}
impl<T, O> IntoIterator for BinaryHeap<T, O> {
    type Item = T;
    type IntoIter = IntoIter<T>;
    /// Consumes the heap, yielding its elements in arbitrary (storage)
    /// order.
    fn into_iter(self) -> IntoIter<T> {
        let iter = self.data.into_iter();
        cfg_if! {
            if #[cfg(not(feature = "inplace_iteration"))] {
                // Wrap the vector iterator in this module's `IntoIter`.
                IntoIter { iter }
            } else {
                // Here `IntoIter` is a re-export of `vec::IntoIter`, so the
                // vector iterator is returned directly.
                iter
            }
        }
    }
}
impl<'a, T, O> IntoIterator for &'a BinaryHeap<T, O> {
type Item = &'a T;
type IntoIter = Iter<'a, T>;
fn into_iter(self) -> Iter<'a, T> {
self.iter()
}
}
impl<T: SortableBy<O>, O: TotalOrder> Extend<T> for BinaryHeap<T, O> {
    /// Extends the heap, dispatching through `SpecExtend` so that (with
    /// the `specialization` feature) `Vec` and `BinaryHeap` sources can
    /// take optimized append-then-rebuild paths.
    #[inline]
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        <Self as SpecExtend<I>>::spec_extend(self, iter);
    }
    /// Pushes a single element.
    #[inline]
    #[cfg(feature = "extend_one")]
    fn extend_one(&mut self, item: T) {
        self.push(item);
    }
    /// Reserves capacity ahead of repeated `extend_one` calls.
    #[inline]
    #[cfg(feature = "extend_one")]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}
cfg_if! {
    if #[cfg(feature = "specialization")] {
        /// Fallback: push elements one at a time.
        impl<T: SortableBy<O>, O: TotalOrder, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T, O> {
            default fn spec_extend(&mut self, iter: I) {
                self.extend_desugared(iter.into_iter());
            }
        }
        /// Specialization for `Vec`: append wholesale, then rebuild only
        /// the tail of the heap.
        impl<T: SortableBy<O>, O: TotalOrder> SpecExtend<Vec<T>> for BinaryHeap<T, O> {
            fn spec_extend(&mut self, ref mut other: Vec<T>) {
                let start = self.data.len();
                self.data.append(other);
                self.rebuild_tail(start);
            }
        }
        /// Specialization for another heap: merge via `append`.
        impl<T: SortableBy<O>, O: TotalOrder> SpecExtend<BinaryHeap<T, O>> for BinaryHeap<T, O> {
            fn spec_extend(&mut self, ref mut other: BinaryHeap<T, O>) {
                self.append(other);
            }
        }
    } else {
        // Without specialization there is a single, non-`default` impl
        // covering every iterator source.
        impl<T: SortableBy<O>, O: TotalOrder, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T, O> {
            fn spec_extend(&mut self, iter: I) {
                self.extend_desugared(iter.into_iter());
            }
        }
    }
}
impl<T: SortableBy<O>, O: TotalOrder> BinaryHeap<T, O> {
    /// Generic extend path: reserve the iterator's lower size bound up
    /// front, then push each element individually.
    fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iterator = iter.into_iter();
        let (lower, _) = iterator.size_hint();
        self.reserve(lower);
        iterator.for_each(move |elem| self.push(elem));
    }
}
impl<'a, T: 'a + SortableBy<O> + Copy, O: TotalOrder> Extend<&'a T> for BinaryHeap<T, O> {
    /// Extends the heap with copies of the referenced elements.
    fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
        self.extend(iter.into_iter().cloned());
    }
    /// Pushes a copy of a single referenced element.
    #[inline]
    #[cfg(feature = "extend_one")]
    fn extend_one(&mut self, &item: &'a T) {
        self.push(item);
    }
    /// Reserves capacity ahead of repeated `extend_one` calls.
    #[inline]
    #[cfg(feature = "extend_one")]
    fn extend_reserve(&mut self, additional: usize) {
        self.reserve(additional);
    }
}