#![deny(unsafe_op_in_unsafe_fn)]
use std::cmp::{min, Ordering};
use std::collections::HashMap;
use std::hash::Hash;
use std::marker::PhantomData;
use compare::Compare;
use core::fmt;
use core::mem::{swap, ManuallyDrop};
use core::ptr;
#[cfg(feature = "serde")]
use serde::{
de::{self, MapAccess, SeqAccess, Visitor},
ser::SerializeStruct,
Deserialize, Deserializer, Serialize, Serializer,
};
use std::ops::Deref;
use std::ops::DerefMut;
use std::vec;
/// A priority queue implemented with a binary heap that additionally keeps a
/// hash map from key to heap position, so entries can be looked up, updated,
/// or removed by key in (amortized) O(log n).
///
/// The comparator `C` decides the ordering of values; the default
/// `MaxComparator` makes this a max-heap.
pub struct BinaryHeap<K, T, C = MaxComparator> {
    /// Heap-ordered storage of `(key, value)` pairs.
    data: Vec<(K, T)>,
    /// Comparator used to order values.
    cmp: C,
    /// Maps each key to the current index of its pair in `data`.
    /// Invariant: `keys.len() == data.len()` and `data[keys[k]].0 == k`.
    keys: HashMap<K, usize>,
    /// `Cell` makes this type `!Sync` while leaving it `Send`
    /// (see `check_is_send_unpin` in the tests).
    _not_sync: PhantomData<std::cell::Cell<()>>,
}
/// Comparator yielding the natural ordering of `T`; with it the heap behaves
/// as a max-heap (greatest element on top).
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub struct MaxComparator;

impl<T: Ord> Compare<T> for MaxComparator {
    fn compare(&self, a: &T, b: &T) -> Ordering {
        // Natural order.
        a.cmp(b)
    }
}

/// Comparator reversing the natural ordering of `T`; with it the heap behaves
/// as a min-heap (smallest element on top).
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub struct MinComparator;

impl<T: Ord> Compare<T> for MinComparator {
    fn compare(&self, a: &T, b: &T) -> Ordering {
        // Comparing `b` to `a` inverts the natural order.
        b.cmp(a)
    }
}
/// Comparator backed by an arbitrary comparison closure
/// `Fn(&T, &T) -> Ordering`.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub struct FnComparator<F>(pub F);

impl<T, F> Compare<T> for FnComparator<F>
where
    F: Fn(&T, &T) -> Ordering,
{
    fn compare(&self, a: &T, b: &T) -> Ordering {
        self.0(a, b)
    }
}

/// Comparator that orders values by a sort key extracted with a closure
/// `Fn(&T) -> K`, using `K`'s natural ordering.
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
#[derive(Clone, Copy, Default, PartialEq, Eq, Debug)]
pub struct KeyComparator<F>(pub F);

impl<K: Ord, T, F> Compare<T> for KeyComparator<F>
where
    F: Fn(&T) -> K,
{
    fn compare(&self, a: &T, b: &T) -> Ordering {
        // NOTE: the key function runs on every comparison; it should be cheap.
        self.0(a).cmp(&self.0(b))
    }
}
/// Structure wrapping a mutable reference to the greatest item on a
/// `BinaryHeap`, created by `BinaryHeap::peek_mut`.
pub struct PeekMut<'a, K: Hash + Eq, T: 'a, C: 'a + Compare<T>> {
    heap: &'a mut BinaryHeap<K, T, C>,
    /// Set once the value has been borrowed mutably; triggers a sift-down on
    /// drop so the (possibly changed) value is moved to its correct position.
    sift: bool,
}

impl<K: fmt::Debug + Hash + Eq, T: fmt::Debug, C: Compare<T>> fmt::Debug for PeekMut<'_, K, T, C> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> Drop for PeekMut<'_, K, T, C> {
    fn drop(&mut self) {
        if self.sift {
            // SAFETY: a `PeekMut` only exists for a non-empty heap (checked in
            // `peek_mut`), so position 0 is a valid index.
            unsafe { self.heap.sift_down(0) };
        }
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> Deref for PeekMut<'_, K, T, C> {
    type Target = T;
    fn deref(&self) -> &T {
        self.key_value().1
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> DerefMut for PeekMut<'_, K, T, C> {
    fn deref_mut(&mut self) -> &mut T {
        self.key_value_mut().1
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> PeekMut<'_, K, T, C> {
    /// Returns the key of the greatest item.
    pub fn key(&self) -> &K {
        debug_assert!(!self.heap.is_empty());
        // SAFETY: the heap is non-empty while a `PeekMut` exists.
        let key_value = unsafe { self.heap.data.get_unchecked(0) };
        &key_value.0
    }
    /// Returns the key and value of the greatest item.
    pub fn key_value(&self) -> (&K, &T) {
        debug_assert!(!self.heap.is_empty());
        // SAFETY: the heap is non-empty while a `PeekMut` exists.
        let key_value = unsafe { self.heap.data.get_unchecked(0) };
        (&key_value.0, &key_value.1)
    }
    /// Returns the key and a mutable reference to the value of the greatest
    /// item. Marks the heap for re-sifting on drop.
    pub fn key_value_mut(&mut self) -> (&mut K, &mut T) {
        debug_assert!(!self.heap.is_empty());
        // The value may be mutated through the returned reference, so the
        // heap property must be restored when this guard is dropped.
        self.sift = true;
        // SAFETY: the heap is non-empty while a `PeekMut` exists.
        let key_value = unsafe { self.heap.data.get_unchecked_mut(0) };
        (&mut key_value.0, &mut key_value.1)
    }
    /// Removes the peeked item from the heap and returns its value.
    pub fn pop(mut self) -> T {
        let value = self.heap.pop().unwrap();
        // `pop` already restored the heap property; skip the drop-time sift.
        self.sift = false;
        value
    }
    /// Removes the peeked item from the heap and returns it as a
    /// `(key, value)` pair.
    pub fn pop_with_key(mut self) -> (K, T) {
        let key_value = self.heap.pop_with_key().unwrap();
        // `pop_with_key` already restored the heap property.
        self.sift = false;
        key_value
    }
}
/// Structure wrapping a mutable reference to an arbitrary value in a
/// `BinaryHeap`, created by `BinaryHeap::get_mut`. On drop the heap is
/// re-sifted around the (possibly changed) value.
pub struct RefMut<'a, K: 'a + Hash + Eq, T: 'a, C: 'a + Compare<T>> {
    heap: &'a mut BinaryHeap<K, T, C>,
    /// Index of the borrowed pair in `heap.data` at the time of creation.
    pos: usize,
    /// The key the caller used to look the value up.
    key: &'a K,
}

impl<K: fmt::Debug + Hash + Eq, T: fmt::Debug, C: Compare<T>> fmt::Debug for RefMut<'_, K, T, C> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("RefMut")
            .field(&self.key)
            .field(&self.heap.data.get(self.pos))
            .finish()
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> Drop for RefMut<'_, K, T, C> {
    fn drop(&mut self) {
        // Unconditionally restore the heap property around this entry; the
        // value may have been mutated through `deref_mut`.
        self.heap.update(self.key);
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> Deref for RefMut<'_, K, T, C> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.heap.data[self.pos].1
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> DerefMut for RefMut<'_, K, T, C> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.heap.data[self.pos].1
    }
}

impl<K: Hash + Eq, T, C: Compare<T>> RefMut<'_, K, T, C> {
    /// Returns the key this entry was looked up with.
    pub fn key(&self) -> &K {
        self.key
    }
    /// Returns the key and a shared reference to the value
    /// (via `Deref` coercion).
    pub fn key_value(&self) -> (&K, &T) {
        (self.key, self)
    }
    /// Returns the key and a mutable reference to the value
    /// (via `DerefMut` coercion).
    pub fn key_value_mut(&mut self) -> (&K, &mut T) {
        (self.key, self)
    }
}
impl<K: Clone, T: Clone, C: Clone> Clone for BinaryHeap<K, T, C> {
fn clone(&self) -> Self {
BinaryHeap {
data: self.data.clone(),
cmp: self.cmp.clone(),
keys: self.keys.clone(),
_not_sync: PhantomData::default(),
}
}
fn clone_from(&mut self, source: &Self) {
self.data.clone_from(&source.data);
self.keys.clone_from(&source.keys);
self.cmp = source.cmp.clone();
}
}
impl<K: Hash + Eq, T, C: Compare<T> + Default> Default for BinaryHeap<K, T, C> {
    /// Creates an empty heap using the comparator's `Default` value.
    #[inline]
    fn default() -> BinaryHeap<K, T, C> {
        BinaryHeap::new()
    }
}

impl<K: fmt::Debug, T: fmt::Debug, C> fmt::Debug for BinaryHeap<K, T, C> {
    /// Formats the heap as a list of `(key, value)` pairs in internal
    /// (heap) order, not in sorted order.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl<K: Hash + Eq, T, C: Compare<T> + Default> BinaryHeap<K, T, C> {
    /// Creates an empty heap with the default comparator.
    #[must_use]
    pub fn new() -> Self {
        // SAFETY: an empty data vector and an empty key map are trivially
        // consistent with each other.
        unsafe { BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), C::default(), false) }
    }
    /// Creates an empty heap with `capacity` pre-allocated in both the backing
    /// vector and the key map.
    #[must_use]
    pub fn with_capacity(capacity: usize) -> Self {
        // SAFETY: an empty data vector and an empty key map are trivially
        // consistent with each other.
        unsafe {
            BinaryHeap::new_from_data_raw(
                Vec::with_capacity(capacity),
                HashMap::with_capacity(capacity),
                C::default(),
                false,
            )
        }
    }
}

impl<K: Hash + Eq + Clone, T, C: Compare<T> + Default> BinaryHeap<K, T, C> {
    /// Builds a heap from `values`, deriving each entry's key with
    /// `key_selector`. Duplicate keys are a logic error (debug-asserted in the
    /// `FromIterator` impl this delegates to).
    pub fn from<I: IntoIterator<Item = T>, F: Fn(&T) -> K>(values: I, key_selector: F) -> Self {
        values
            .into_iter()
            .map(|value| (key_selector(&value), value))
            .collect()
    }
}
impl<K: Hash + Eq, T, C: Compare<T>> BinaryHeap<K, T, C> {
    /// Builds a heap directly from its raw parts.
    ///
    /// # Safety
    /// `keys` must contain exactly the keys present in `data`, each mapped to
    /// the index of its pair in `data`: the unsafe sifting code indexes `data`
    /// with positions taken from `keys` without bounds checks. Passing
    /// `rebuild = true` restores the heap *ordering* only; it does not appear
    /// to repair an inconsistent key map, since only elements that move get
    /// their indices rewritten (NOTE(review): confirm against `Hole`).
    #[must_use]
    #[doc(hidden)]
    pub unsafe fn new_from_data_raw(
        data: Vec<(K, T)>,
        keys: HashMap<K, usize>,
        cmp: C,
        rebuild: bool,
    ) -> Self {
        let mut heap = BinaryHeap {
            data,
            cmp,
            keys,
            _not_sync: PhantomData::default(),
        };
        // Cheap sanity check: one key-map entry per stored pair.
        debug_assert!(heap.data.len() == heap.keys.len());
        if rebuild && !heap.data.is_empty() {
            heap.rebuild();
        }
        heap
    }
}
impl<K: Hash + Eq, T: Ord> BinaryHeap<K, T, MinComparator> {
    /// Creates an empty min-heap (smallest value on top).
    #[must_use]
    pub fn new_min() -> Self {
        // SAFETY: empty data and key map are trivially consistent.
        unsafe { BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), MinComparator, false) }
    }
    /// Creates an empty min-heap with pre-allocated capacity.
    #[must_use]
    pub fn with_capacity_min(capacity: usize) -> Self {
        // SAFETY: empty data and key map are trivially consistent.
        unsafe {
            BinaryHeap::new_from_data_raw(
                Vec::with_capacity(capacity),
                HashMap::with_capacity(capacity),
                MinComparator,
                false,
            )
        }
    }
}

impl<K: Hash + Eq, T, F> BinaryHeap<K, T, FnComparator<F>>
where
    F: Fn(&T, &T) -> Ordering,
{
    /// Creates an empty heap ordered by the comparison closure `f`.
    #[must_use]
    pub fn new_by(f: F) -> Self {
        // SAFETY: empty data and key map are trivially consistent.
        unsafe { BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), FnComparator(f), false) }
    }
    /// Creates an empty heap ordered by `f`, with pre-allocated capacity.
    #[must_use]
    pub fn with_capacity_by(capacity: usize, f: F) -> Self {
        // SAFETY: empty data and key map are trivially consistent.
        unsafe {
            BinaryHeap::new_from_data_raw(
                Vec::with_capacity(capacity),
                HashMap::with_capacity(capacity),
                FnComparator(f),
                false,
            )
        }
    }
}

impl<K: Hash + Eq, T, F, C: Ord> BinaryHeap<K, T, KeyComparator<F>>
where
    F: Fn(&T) -> C,
{
    /// Creates an empty heap ordered by the sort key extracted with `f`.
    #[must_use]
    pub fn new_by_sort_key(f: F) -> Self {
        // SAFETY: empty data and key map are trivially consistent.
        unsafe {
            BinaryHeap::new_from_data_raw(Vec::new(), HashMap::new(), KeyComparator(f), false)
        }
    }
    /// Creates an empty heap ordered by the sort key extracted with `f`, with
    /// pre-allocated capacity.
    #[must_use]
    pub fn with_capacity_by_sort_key(capacity: usize, f: F) -> Self {
        // SAFETY: empty data and key map are trivially consistent.
        unsafe {
            BinaryHeap::new_from_data_raw(
                Vec::with_capacity(capacity),
                HashMap::with_capacity(capacity),
                KeyComparator(f),
                false,
            )
        }
    }
}
impl<K: Hash + Eq + Clone, T, C: Compare<T>> BinaryHeap<K, T, C> {
    /// Inserts `item` under `key`. If the key is already present, its value is
    /// replaced and the previous value returned; otherwise the pair is added
    /// and `None` is returned.
    pub fn push(&mut self, key: K, item: T) -> Option<T> {
        if let Some(pos) = self.keys.get(&key).copied() {
            // Key already present: replace the value in place...
            let mut old = std::mem::replace(&mut self.data[pos], (key, item));
            // ...but swap the original key instance back into `data`, so the
            // instance stored there stays the one the `keys` map was built
            // with (the new key is equal, but may not be the same instance).
            std::mem::swap(&mut old.0, &mut self.data[pos].0);
            // Re-establish the heap property around the changed value.
            self.update(&old.0);
            Some(old.1)
        } else {
            let old_len = self.len();
            self.data.push((key.clone(), item));
            self.keys.insert(key, old_len);
            // SAFETY: the new element was just pushed at index `old_len`.
            unsafe { self.sift_up(0, old_len) };
            None
        }
    }
}
impl<K: Hash + Eq, T, C: Compare<T>> BinaryHeap<K, T, C> {
    /// Returns a mutable view of the greatest item, or `None` if the heap is
    /// empty. The heap is re-sifted when the `PeekMut` is dropped, if the
    /// value was borrowed mutably.
    pub fn peek_mut(&mut self) -> Option<PeekMut<'_, K, T, C>> {
        if self.is_empty() {
            None
        } else {
            Some(PeekMut {
                heap: self,
                sift: false,
            })
        }
    }

    /// Removes the greatest item and returns its value, or `None` if empty.
    pub fn pop(&mut self) -> Option<T> {
        self.pop_with_key().map(|kv| kv.1)
    }

    /// Removes the greatest item and returns it as a `(key, value)` pair, or
    /// `None` if the heap is empty.
    pub fn pop_with_key(&mut self) -> Option<(K, T)> {
        let item = self.data.pop().map(|mut item| {
            if !self.data.is_empty() {
                // Move the last element to the root, then sift it back down.
                swap(&mut item, &mut self.data[0]);
                // SAFETY: `data` is non-empty, so position 0 is valid.
                unsafe { self.sift_down_to_bottom(0) };
            }
            item
        });
        // Keep the key map in sync with the removed pair.
        item.as_ref().and_then(|kv| self.keys.remove(&kv.0));
        item
    }

    /// Returns `true` if the heap contains an entry for `key`.
    pub fn contains_key(&self, key: &K) -> bool {
        self.keys.contains_key(key)
    }

    /// Returns a shared reference to the value stored for `key`, if any.
    pub fn get(&self, key: &K) -> Option<&T> {
        self.keys.get(key).map(|index| &self.data[*index].1)
    }

    /// Returns a mutable view of the value stored for `key`, if any. The heap
    /// is re-sifted when the returned `RefMut` is dropped.
    pub fn get_mut<'a>(&'a mut self, key: &'a K) -> Option<RefMut<'a, K, T, C>> {
        self.keys.get(key).copied().map(|pos| RefMut {
            heap: self,
            pos,
            key,
        })
    }

    /// Removes the entry stored under `key` and returns it, or `None` if the
    /// key is not present.
    pub fn remove(&mut self, key: &K) -> Option<(K, T)> {
        if let Some(pos) = self.keys.get(key).copied() {
            let item = self.data.pop().map(|mut item| {
                // `pos == data.len()` means the removed entry *was* the last
                // element; nothing needs to be re-sifted in that case.
                if !self.data.is_empty() && pos < self.data.len() {
                    swap(&mut item, &mut self.data[pos]);
                    // BUGFIX: the element moved into `pos` may violate the
                    // heap property in either direction — it can be greater
                    // than its parent or smaller than its children. The
                    // previous code only sifted towards the leaves
                    // (`sift_down_to_bottom(pos)`), which could leave the
                    // replacement above a smaller parent. Sift up first, and
                    // only sift down if the element did not move up.
                    // SAFETY: `pos < data.len()` was checked above.
                    let pos_after_sift_up = unsafe { self.sift_up(0, pos) };
                    if pos_after_sift_up == pos {
                        // SAFETY: the element is still at `pos`, a valid index.
                        unsafe { self.sift_down(pos) };
                    }
                }
                item
            });
            item.as_ref().and_then(|kv| self.keys.remove(&kv.0));
            item
        } else {
            None
        }
    }

    /// Restores the heap property around the entry stored under `key` after
    /// its value changed.
    ///
    /// # Panics
    /// Panics if `key` is not present in the heap.
    #[doc(hidden)]
    pub fn update(&mut self, key: &K) {
        let pos = self.keys[key];
        // SAFETY: `pos` comes from the key map, which mirrors `data` indices.
        let pos_after_sift_up = unsafe { self.sift_up(0, pos) };
        if pos_after_sift_up != pos {
            // The element moved towards the root; no downward sift is needed.
            return;
        }
        // SAFETY: the element is still at `pos`, a valid index.
        unsafe {
            self.sift_down(pos);
        }
    }

    /// Reserves capacity for at least `additional` more elements in both the
    /// backing vector and the key map.
    pub fn reserve(&mut self, additional: usize) {
        self.data.reserve(additional);
        self.keys.reserve(additional);
    }

    /// Discards as much unused capacity as possible.
    pub fn shrink_to_fit(&mut self) {
        self.data.shrink_to_fit();
        self.keys.shrink_to_fit();
    }

    /// Discards unused capacity down to (at least) `min_capacity`.
    #[inline]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        self.data.shrink_to(min_capacity);
        self.keys.shrink_to(min_capacity);
    }

    /// Sifts the element at `pos` towards the root (but never above `start`)
    /// while it compares greater than its parent. Returns its final position.
    ///
    /// # Safety
    /// `pos` must be a valid index into `data`.
    unsafe fn sift_up(&mut self, start: usize, pos: usize) -> usize {
        // SAFETY: the caller guarantees `pos < data.len()`.
        let mut hole = unsafe { Hole::new(&mut self.data, &mut self.keys, pos) };
        while hole.pos() > start {
            let parent = (hole.pos() - 1) / 2;
            if self
                .cmp
                .compares_le(hole.element(), unsafe { hole.get(parent) })
            {
                break;
            }
            // SAFETY: `parent` is in bounds and differs from the hole position.
            unsafe { hole.move_to(parent) };
        }
        hole.pos()
    }

    /// Sifts the element at `pos` towards the leaves, considering only the
    /// first `end` elements, until both of its children compare less or equal.
    ///
    /// # Safety
    /// `pos` must be a valid index into `data` and `end <= data.len()`.
    unsafe fn sift_down_range(&mut self, pos: usize, end: usize) {
        // SAFETY: the caller guarantees `pos < data.len()`.
        let mut hole = unsafe { Hole::new(&mut self.data, &mut self.keys, pos) };
        let mut child = 2 * hole.pos() + 1;
        // Loop while the hole has two children inside `end`.
        while child <= end.saturating_sub(2) {
            // Pick the greater of the two children.
            child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize;
            if self
                .cmp
                .compares_ge(hole.element(), unsafe { hole.get(child) })
            {
                return;
            }
            // SAFETY: `child < end <= data.len()` and differs from the hole.
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }
        // The hole may still have a single (left) child at the very end.
        if child == end - 1
            && self
                .cmp
                .compares_lt(hole.element(), unsafe { hole.get(child) })
        {
            // SAFETY: `child == end - 1 < data.len()`.
            unsafe { hole.move_to(child) };
        }
    }

    /// Sifts the element at `pos` down within the whole heap.
    ///
    /// # Safety
    /// `pos` must be a valid index into `data`.
    unsafe fn sift_down(&mut self, pos: usize) {
        let len = self.data.len();
        // SAFETY: forwarded from the caller; `end == len` is trivially valid.
        unsafe { self.sift_down_range(pos, len) };
    }

    /// Moves the element at `pos` all the way down to a leaf without comparing
    /// it on the way, then sifts it back up (never above `start == pos`).
    /// Cheaper than `sift_down` when the element is expected to end up near
    /// the bottom, e.g. the former last element after a `pop`.
    ///
    /// # Safety
    /// `pos` must be a valid index into `data`.
    unsafe fn sift_down_to_bottom(&mut self, mut pos: usize) {
        let end = self.data.len();
        let start = pos;
        // SAFETY: the caller guarantees `pos < data.len()`.
        let mut hole = unsafe { Hole::new(&mut self.data, &mut self.keys, pos) };
        let mut child = 2 * hole.pos() + 1;
        while child <= end.saturating_sub(2) {
            // Always descend into the greater child.
            child += unsafe { self.cmp.compares_le(hole.get(child), hole.get(child + 1)) } as usize;
            // SAFETY: `child < end <= data.len()` and differs from the hole.
            unsafe { hole.move_to(child) };
            child = 2 * hole.pos() + 1;
        }
        if child == end - 1 {
            // SAFETY: `child == end - 1 < data.len()`.
            unsafe { hole.move_to(child) };
        }
        pos = hole.pos();
        drop(hole);
        // The element now sits at a leaf; sift it up to its true position.
        // SAFETY: `pos` is the hole's final, in-bounds position.
        unsafe { self.sift_up(start, pos) };
    }

    /// Restores the heap property assuming `0..start` already forms a valid
    /// heap and `start..len` was appended arbitrarily. Chooses between a full
    /// rebuild and sifting up each tail element, based on a cost estimate.
    #[allow(dead_code)]
    fn rebuild_tail(&mut self, start: usize) {
        if start == self.len() {
            return;
        }
        let tail_len = self.len() - start;
        #[inline(always)]
        fn log2_fast(x: usize) -> usize {
            (usize::BITS - x.leading_zeros() - 1) as usize
        }
        // A full rebuild costs about `2 * len` comparisons, sifting the tail
        // about `tail_len * log2(start)`; `11 ≈ log2(2048)` caps the estimate
        // for large heaps.
        let better_to_rebuild = if start < tail_len {
            true
        } else if self.data.len() <= 2048 {
            2 * self.data.len() < tail_len * log2_fast(start)
        } else {
            2 * self.data.len() < tail_len * 11
        };
        if better_to_rebuild {
            self.rebuild();
        } else {
            for i in start..self.data.len() {
                // SAFETY: `i < data.len()` by the loop bound.
                unsafe { self.sift_up(0, i) };
            }
        }
    }

    /// Rebuilds the heap property from scratch in O(n) by sifting down every
    /// non-leaf node, starting from the last parent.
    fn rebuild(&mut self) {
        let mut n = self.len() / 2;
        while n > 0 {
            n -= 1;
            // SAFETY: `n < len / 2 <= data.len()`, so the index is valid.
            unsafe { self.sift_down(n) };
        }
    }
}
impl<K, T, C> BinaryHeap<K, T, C> {
    /// Returns an iterator over `(key, value)` pairs in arbitrary
    /// (heap-internal) order.
    pub fn iter(&self) -> Iter<'_, K, T> {
        let iter = self.data.iter();
        Iter { iter }
    }
    /// Returns an iterator over the values in arbitrary order.
    pub fn iter_values(&self) -> IterValues<'_, K, T> {
        let iter = self.data.iter();
        IterValues { iter }
    }
    /// Returns an iterator over the keys in arbitrary order.
    pub fn iter_keys(&self) -> IterKeys<'_, K, T> {
        let iter = self.data.iter();
        IterKeys { iter }
    }
    /// Consumes the heap and returns an iterator over its values in arbitrary
    /// order.
    pub fn into_values(self) -> IntoValues<K, T> {
        let iter = self.data.into_iter();
        IntoValues { iter }
    }
    /// Consumes the heap and returns an iterator over its keys in arbitrary
    /// order.
    pub fn into_keys(self) -> IntoKeys<K, T> {
        let iter = self.data.into_iter();
        IntoKeys { iter }
    }
    /// Consumes the heap and returns an iterator yielding values in heap
    /// order (greatest first under the heap's comparator).
    pub fn into_iter_sorted(self) -> IntoIterSorted<K, T, C> {
        IntoIterSorted { inner: self }
    }
    /// Returns a reference to the greatest value, or `None` if empty.
    #[must_use]
    pub fn peek(&self) -> Option<&T> {
        self.peek_with_key().map(|(_, value)| value)
    }
    /// Returns the greatest entry as a `(&key, &value)` pair, or `None` if
    /// the heap is empty.
    #[must_use]
    pub fn peek_with_key(&self) -> Option<(&K, &T)> {
        self.data.first().map(|(key, value)| (key, value))
    }
    /// Returns the capacities of the backing vector and the key map.
    #[must_use]
    pub fn capacity(&self) -> (usize, usize) {
        (self.data.capacity(), self.keys.capacity())
    }
    /// Returns the smaller of the two backing capacities.
    #[must_use]
    pub fn capacity_min(&self) -> usize {
        min(self.data.capacity(), self.keys.capacity())
    }
    /// Returns the number of entries in the heap.
    #[must_use]
    pub fn len(&self) -> usize {
        debug_assert!(self.data.len() == self.keys.len());
        self.data.len()
    }
    /// Returns `true` if the heap holds no entries.
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Clears the heap, returning an iterator over the removed pairs in
    /// arbitrary order. Any pairs not consumed are dropped with the iterator.
    #[inline]
    pub fn drain(&mut self) -> Drain<'_, (K, T)> {
        // The key map can be cleared eagerly; the pairs leave via `iter`.
        self.keys.clear();
        let iter = self.data.drain(..);
        Drain { iter }
    }
    /// Removes all entries from the heap.
    pub fn clear(&mut self) {
        self.drain();
    }
}
#[cfg(feature = "serde")]
impl<K: Hash + Eq + Serialize, T: Serialize, C: Serialize> Serialize for BinaryHeap<K, T, C> {
    /// Serializes the heap as a struct with its three parts: the backing
    /// `data` vector, the comparator `cmp`, and the `keys` index map.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut state = serializer.serialize_struct("BinaryHeap", 3)?;
        state.serialize_field("data", &self.data)?;
        state.serialize_field("cmp", &self.cmp)?;
        state.serialize_field("keys", &self.keys)?;
        state.end()
    }
}
#[cfg(feature = "serde")]
// NOTE(review): deserialization reconstructs the heap from serialized `data`
// and `keys` without validating that the key map matches the data, although
// the unsafe sifting code indexes `data` with positions taken from `keys`.
// Presumably inputs are expected to be trusted (e.g. produced by `Serialize`
// above) — confirm before feeding untrusted input to this impl.
impl<'de, K: Hash + Eq + Deserialize<'de>, T: Deserialize<'de>, C: Deserialize<'de>>
    Deserialize<'de> for BinaryHeap<K, T, C>
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        /// Identifies one of the three serialized struct fields.
        enum Field {
            Data,
            Cmp,
            Keys,
        }
        impl<'de> Deserialize<'de> for Field {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: Deserializer<'de>,
            {
                struct FieldVisitor;
                impl<'de_f> Visitor<'de_f> for FieldVisitor {
                    type Value = Field;
                    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                        formatter.write_str("`data` or `cmp` or `keys`")
                    }
                    fn visit_str<E>(self, value: &str) -> Result<Field, E>
                    where
                        E: de::Error,
                    {
                        match value {
                            "data" => Ok(Field::Data),
                            "cmp" => Ok(Field::Cmp),
                            "keys" => Ok(Field::Keys),
                            _ => Err(de::Error::unknown_field(value, FIELDS)),
                        }
                    }
                }
                deserializer.deserialize_identifier(FieldVisitor)
            }
        }
        /// Visitor carrying the heap's three type parameters; the
        /// `PhantomData` fields only anchor the generics and the lifetime.
        struct BinaryHeapVisitor<
            'de_bh,
            K: Hash + Eq + Deserialize<'de_bh>,
            T: Deserialize<'de_bh>,
            C: Deserialize<'de_bh>,
        > {
            _phandom_de: std::marker::PhantomData<&'de_bh ()>,
            _phantom_k: std::marker::PhantomData<K>,
            _phantom_t: std::marker::PhantomData<T>,
            _phtatom_c: std::marker::PhantomData<C>,
        }
        impl<
                'de_bh,
                K: Hash + Eq + Deserialize<'de_bh>,
                T: Deserialize<'de_bh>,
                C: Deserialize<'de_bh>,
            > Visitor<'de_bh> for BinaryHeapVisitor<'de_bh, K, T, C>
        {
            type Value = BinaryHeap<K, T, C>;
            fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
                formatter.write_str("struct BinaryHeap")
            }
            /// Handles compact formats that serialize structs as sequences;
            /// fields must appear in `data`, `cmp`, `keys` order.
            fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
            where
                V: SeqAccess<'de_bh>,
            {
                let data = seq
                    .next_element()?
                    .ok_or_else(|| de::Error::invalid_length(0, &self))?;
                let cmp = seq
                    .next_element()?
                    .ok_or_else(|| de::Error::invalid_length(1, &self))?;
                let keys = seq
                    .next_element()?
                    .ok_or_else(|| de::Error::invalid_length(2, &self))?;
                Ok(BinaryHeap {
                    data,
                    cmp,
                    keys,
                    _not_sync: PhantomData::default(),
                })
            }
            /// Handles self-describing formats; fields may appear in any
            /// order, duplicates and omissions are errors.
            fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error>
            where
                V: MapAccess<'de_bh>,
            {
                let mut data = None;
                let mut cmp = None;
                let mut keys = None;
                while let Some(key) = map.next_key()? {
                    match key {
                        Field::Data => {
                            if data.is_some() {
                                return Err(de::Error::duplicate_field("data"));
                            }
                            data = Some(map.next_value()?);
                        }
                        Field::Cmp => {
                            if cmp.is_some() {
                                return Err(de::Error::duplicate_field("cmp"));
                            }
                            cmp = Some(map.next_value()?);
                        }
                        Field::Keys => {
                            if keys.is_some() {
                                return Err(de::Error::duplicate_field("keys"));
                            }
                            keys = Some(map.next_value()?);
                        }
                    }
                }
                let data = data.ok_or_else(|| de::Error::missing_field("data"))?;
                let cmp = cmp.ok_or_else(|| de::Error::missing_field("cmp"))?;
                let keys = keys.ok_or_else(|| de::Error::missing_field("keys"))?;
                Ok(BinaryHeap {
                    data,
                    cmp,
                    keys,
                    _not_sync: PhantomData::default(),
                })
            }
        }
        let visitor = BinaryHeapVisitor {
            _phandom_de: Default::default(),
            _phantom_k: Default::default(),
            _phantom_t: Default::default(),
            _phtatom_c: Default::default(),
        };
        const FIELDS: &'static [&'static str] = &["data", "cmp", "keys"];
        deserializer.deserialize_struct("BinaryHeap", FIELDS, visitor)
    }
}
/// A "hole" in the heap's slice: the element at `pos` has been moved out (via
/// `ptr::read`) and must not be observed until the hole is filled again.
/// On drop the taken element is written back into the hole's final position,
/// and the key map entry for it is updated accordingly.
struct Hole<'a, K: Hash + Eq, T: 'a> {
    data: &'a mut [(K, T)],
    /// Key-to-index map, kept in sync as elements move past the hole.
    keys: &'a mut HashMap<K, usize>,
    /// The element taken out of the slice; written back exactly once on drop,
    /// so it must not be dropped on its own (hence `ManuallyDrop`).
    elt: ManuallyDrop<(K, T)>,
    /// Current position of the hole within `data`.
    pos: usize,
}

impl<'a, K: Hash + Eq, T> Hole<'a, K, T> {
    /// Creates a new hole at `pos` by moving the element out of the slice.
    ///
    /// # Safety
    /// `pos` must be a valid index into `data`. Until the hole is dropped,
    /// `data[pos]` is logically uninitialized and must not be read except
    /// through the hole's own accessors.
    #[inline]
    unsafe fn new(data: &'a mut [(K, T)], keys: &'a mut HashMap<K, usize>, pos: usize) -> Self {
        debug_assert!(pos < data.len());
        // SAFETY: caller guarantees `pos` is in bounds; the duplicated value
        // is written back exactly once in `drop`.
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        debug_assert!(keys.contains_key(&elt.0));
        Hole {
            data,
            keys,
            elt: ManuallyDrop::new(elt),
            pos,
        }
    }
    /// Returns the current position of the hole.
    #[inline]
    fn pos(&self) -> usize {
        self.pos
    }
    /// Returns a reference to the value that was removed into the hole.
    #[inline]
    fn element(&self) -> &T {
        &self.elt.1
    }
    /// Returns a reference to the value at `index`.
    ///
    /// # Safety
    /// `index` must be in bounds and must not equal the hole position
    /// (that slot is logically uninitialized).
    #[inline]
    unsafe fn get(&self, index: usize) -> &T {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        // SAFETY: caller guarantees `index` is in bounds and not the hole.
        let key_value = unsafe { self.data.get_unchecked(index) };
        &key_value.1
    }
    /// Moves the element at `target_position` into the hole, moving the hole
    /// to `target_position` and updating the moved element's key-map entry.
    ///
    /// # Safety
    /// `target_position` must be in bounds and must not equal the current
    /// hole position.
    #[inline]
    unsafe fn move_to(&mut self, target_position: usize) {
        debug_assert!(target_position != self.pos);
        debug_assert!(target_position < self.data.len());
        // SAFETY: both indices are in bounds and distinct, so the source and
        // destination of the copy do not overlap.
        unsafe {
            let ptr = self.data.as_mut_ptr();
            let target_ptr: *const _ = ptr.add(target_position);
            let target_element: &(K, T) = &*target_ptr;
            // Record the moved element's new index before copying it.
            *self.keys.get_mut(&target_element.0).expect(
                "Hole can only exist for key values pairs, that are already part of the heap.",
            ) = self.pos;
            let hole_ptr = ptr.add(self.pos);
            ptr::copy_nonoverlapping(target_ptr, hole_ptr, 1);
        }
        self.pos = target_position;
    }
}

impl<K: Hash + Eq, T> Drop for Hole<'_, K, T> {
    /// Fills the hole with the taken element and records its final index in
    /// the key map.
    #[inline]
    fn drop(&mut self) {
        // SAFETY: the hole position is in bounds, and `elt` is written back
        // exactly once, completing the `ptr::read` from `new`.
        unsafe {
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
        let key = &self.elt.0;
        *self.keys.get_mut(key).expect(
            "Hole can only exist for key values pairs, that are already part of the heap.",
        ) = self.pos;
    }
}
/// Owning iterator that yields the heap's values in heap order (greatest
/// first under the heap's comparator) by repeatedly popping.
#[must_use = "iterators are lazy and do nothing unless consumed"]
#[derive(Clone, Debug)]
pub struct IntoIterSorted<K, T, C> {
    inner: BinaryHeap<K, T, C>,
}

impl<K: Hash + Eq, T, C: Compare<T>> Iterator for IntoIterSorted<K, T, C> {
    type Item = T;
    #[inline]
    fn next(&mut self) -> Option<T> {
        self.inner.pop()
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // The remaining heap length is exact.
        let exact = self.inner.len();
        (exact, Some(exact))
    }
}
/// Draining iterator over a heap's `(key, value)` pairs; the elements are
/// removed from the backing vector as they are yielded (or when the iterator
/// is dropped).
#[derive(Debug)]
pub struct Drain<'a, T: 'a> {
    iter: vec::Drain<'a, T>,
}

impl<T> Iterator for Drain<'_, T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        Iterator::next(&mut self.iter)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(&self.iter)
    }
}

impl<T> DoubleEndedIterator for Drain<'_, T> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        DoubleEndedIterator::next_back(&mut self.iter)
    }
}
impl<K: Hash + Eq + Clone, T, C: Compare<T> + Default> FromIterator<(K, T)>
    for BinaryHeap<K, T, C>
{
    /// Builds a heap from `(key, value)` pairs by appending everything first
    /// and then rebuilding once in O(n), instead of pushing one by one.
    ///
    /// Duplicate keys are a logic error: they are only caught by a debug
    /// assertion, and in release builds would leave `data` and `keys` with
    /// different lengths.
    fn from_iter<I: IntoIterator<Item = (K, T)>>(iter: I) -> Self {
        let iter = iter.into_iter();
        // Lower size-hint bound; exact for well-behaved iterators.
        let size_hint = iter.size_hint().0;
        let mut heap = BinaryHeap::with_capacity(size_hint);
        for (key, value) in iter {
            heap.data.push((key.clone(), value));
            let existing = heap.keys.insert(key, heap.data.len() - 1);
            #[cfg(debug_assertions)]
            if let Some(existing_key) = existing {
                debug_assert!(
                    false,
                    "Tried to insert the same key multiple times: {}",
                    existing_key
                );
            }
        }
        // Establish the heap ordering over the raw appended data.
        heap.rebuild();
        heap
    }
}
impl<K, T, C> IntoIterator for BinaryHeap<K, T, C> {
    type Item = (K, T);
    type IntoIter = IntoIter<K, T>;
    /// Consumes the heap, yielding `(key, value)` pairs in arbitrary
    /// (heap-internal) order; use `into_iter_sorted` for heap order.
    fn into_iter(self) -> IntoIter<K, T> {
        IntoIter {
            iter: self.data.into_iter(),
        }
    }
}
/// Owning iterator over a heap's `(key, value)` pairs, in arbitrary order.
#[derive(Clone)]
pub struct IntoIter<K, T> {
    iter: vec::IntoIter<(K, T)>,
}

impl<K, T> Iterator for IntoIter<K, T> {
    type Item = (K, T);

    fn next(&mut self) -> Option<Self::Item> {
        Iterator::next(&mut self.iter)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(&self.iter)
    }
}
/// Owning iterator over a heap's values, in arbitrary order.
#[derive(Clone)]
pub struct IntoValues<K, V> {
    iter: vec::IntoIter<(K, V)>,
}

impl<K, V> Iterator for IntoValues<K, V> {
    type Item = V;

    /// Yields the next value, discarding its key.
    fn next(&mut self) -> Option<Self::Item> {
        let (_key, value) = self.iter.next()?;
        Some(value)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(&self.iter)
    }
}
/// Owning iterator over a heap's keys, in arbitrary order.
#[derive(Clone)]
pub struct IntoKeys<K, V> {
    iter: vec::IntoIter<(K, V)>,
}

impl<K, V> Iterator for IntoKeys<K, V> {
    type Item = K;

    /// Yields the next key, discarding its value.
    fn next(&mut self) -> Option<Self::Item> {
        let (key, _value) = self.iter.next()?;
        Some(key)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(&self.iter)
    }
}
/// Borrowing iterator over a heap's `(key, value)` pairs, in arbitrary order.
#[derive(Clone)]
pub struct Iter<'a, K, T> {
    iter: std::slice::Iter<'a, (K, T)>,
}

impl<'a, K, T> Iterator for Iter<'a, K, T> {
    type Item = (&'a K, &'a T);

    /// Splits the next stored pair into a `(&key, &value)` tuple.
    fn next(&mut self) -> Option<Self::Item> {
        let (key, value) = self.iter.next()?;
        Some((key, value))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(&self.iter)
    }

    /// Delegates to the slice iterator, which reaches its last element
    /// without walking the whole sequence.
    #[inline]
    fn last(self) -> Option<Self::Item>
    where
        Self: Sized,
    {
        let (key, value) = self.iter.last()?;
        Some((key, value))
    }
}

impl<'a, K, T> DoubleEndedIterator for Iter<'a, K, T> {
    fn next_back(&mut self) -> Option<Self::Item> {
        let (key, value) = self.iter.next_back()?;
        Some((key, value))
    }
}
/// Borrowing iterator over a heap's values, in arbitrary order.
#[derive(Clone)]
pub struct IterValues<'a, K, T> {
    iter: std::slice::Iter<'a, (K, T)>,
}

impl<'a, K, T> Iterator for IterValues<'a, K, T> {
    type Item = &'a T;

    /// Yields a reference to the next value, skipping its key.
    fn next(&mut self) -> Option<Self::Item> {
        let (_key, value) = self.iter.next()?;
        Some(value)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(&self.iter)
    }

    /// Delegates to the slice iterator, which reaches its last element
    /// without walking the whole sequence.
    #[inline]
    fn last(self) -> Option<Self::Item>
    where
        Self: Sized,
    {
        let (_key, value) = self.iter.last()?;
        Some(value)
    }
}
/// Borrowing iterator over a heap's keys, in arbitrary order.
#[derive(Clone)]
pub struct IterKeys<'a, K, T> {
    iter: std::slice::Iter<'a, (K, T)>,
}

impl<'a, K, T> Iterator for IterKeys<'a, K, T> {
    type Item = &'a K;

    /// Yields a reference to the next key, skipping its value.
    fn next(&mut self) -> Option<Self::Item> {
        let (key, _value) = self.iter.next()?;
        Some(key)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        Iterator::size_hint(&self.iter)
    }

    /// Delegates to the slice iterator, which reaches its last element
    /// without walking the whole sequence.
    #[inline]
    fn last(self) -> Option<Self::Item>
    where
        Self: Sized,
    {
        let (key, _value) = self.iter.last()?;
        Some(key)
    }
}
impl<'a, K, T, C> IntoIterator for &'a BinaryHeap<K, T, C> {
    type Item = (&'a K, &'a T);
    type IntoIter = Iter<'a, K, T>;
    /// Borrowing iteration over `(key, value)` pairs in arbitrary order.
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// Mutable iterator over a heap's `(key, value)` pairs in arbitrary order.
/// Keys are handed out shared (mutating a key would corrupt the `keys` map),
/// values mutably. Because values may be mutated arbitrarily, the whole heap
/// is rebuilt when the iterator is dropped.
pub struct MutIter<'a, K: Hash + Eq, T, C: Compare<T>> {
    /// Raw pointer back to the heap; only used in `Drop` to rebuild it.
    heap: *mut BinaryHeap<K, T, C>,
    iter: std::slice::IterMut<'a, (K, T)>,
}

impl<'a, K: Hash + Eq, T, C: Compare<T>> IntoIterator for &'a mut BinaryHeap<K, T, C> {
    type Item = (&'a K, &'a mut T);
    type IntoIter = MutIter<'a, K, T, C>;
    fn into_iter(self) -> Self::IntoIter {
        let heap: *mut BinaryHeap<K, T, C> = self;
        // SAFETY: `heap` was just derived from a live `&'a mut` borrow, so it
        // is valid for `'a`. The element iterator is an `IterMut` obtained
        // through that same pointer, so handing out `&mut T` from it is
        // sound. (BUGFIX: the previous implementation kept a shared
        // `slice::Iter` and cast its `&T` items to `*mut T`/`&mut T`;
        // mutating through a pointer derived from a shared reference is
        // undefined behavior.)
        let iter = unsafe { (*heap).data.iter_mut() };
        MutIter { heap, iter }
    }
}

impl<'a, K: Hash + Eq, T, C: Compare<T>> Iterator for MutIter<'a, K, T, C> {
    type Item = (&'a K, &'a mut T);
    fn next(&mut self) -> Option<Self::Item> {
        // Split the `&mut (K, T)` borrow: the key stays shared, the value is
        // passed on mutably.
        self.iter.next().map(|kv| (&kv.0, &mut kv.1))
    }
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a, K: Hash + Eq, T, C: Compare<T>> Drop for MutIter<'a, K, T, C> {
    fn drop(&mut self) {
        // SAFETY: the heap outlives `'a` and therefore this iterator.
        // Rebuilding restores the heap invariant after arbitrary value
        // mutation through the yielded references.
        let heap = unsafe { &mut *self.heap };
        heap.rebuild();
    }
}
#[cfg(test)]
mod test {
    use crate::BinaryHeap;
    use std::collections::HashMap;
    use std::hash::Hash;
    // Compiles only if `T` is `Send + Unpin`; used as a static assertion.
    fn is_normal<T: Send + Unpin>() {}
    #[test]
    fn check_is_send_unpin() {
        // Static check: the heap must be `Send` and `Unpin` (it is
        // intentionally `!Sync` via the `PhantomData<Cell<()>>` marker).
        is_normal::<BinaryHeap<i64, i64>>();
        assert!(true);
    }
    /// Asserts that the heap's key map contains exactly one entry per stored
    /// pair, each pointing at the pair's current index in `data`.
    fn assert_key_map_valid<K: Hash + Eq + Clone, T, C>(bh: &BinaryHeap<K, T, C>) {
        let mut expected_keys = HashMap::new();
        for (i, kv) in bh.data.iter().enumerate() {
            expected_keys.insert(kv.0.clone(), i);
        }
        for key_index in &expected_keys {
            let key = &key_index.0;
            let index = *key_index.1;
            assert!(bh.keys.contains_key(&key));
            assert_eq!(bh.keys[key], index);
        }
        // Same size in both directions => the maps are identical.
        assert_eq!(bh.keys.len(), expected_keys.len());
    }
    #[test]
    fn valid_key_map() {
        // Exercises push/pop interleavings and checks the key map after each
        // mutation; pops must come out in descending value order (max-heap).
        let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
        assert_key_map_valid(&heap);
        heap.push(0, 0);
        assert_key_map_valid(&heap);
        heap.push(1, 10);
        heap.push(2, 15);
        heap.push(3, 5);
        heap.push(4, 8);
        assert_key_map_valid(&heap);
        assert_eq!(heap.pop_with_key(), Some((2, 15)));
        assert_key_map_valid(&heap);
        assert_eq!(heap.pop_with_key(), Some((1, 10)));
        assert_eq!(heap.pop_with_key(), Some((4, 8)));
        heap.push(5, 2);
        assert_key_map_valid(&heap);
        assert_eq!(heap.pop_with_key(), Some((3, 5)));
        assert_eq!(heap.pop_with_key(), Some((5, 2)));
        assert_eq!(heap.pop_with_key(), Some((0, 0)));
        assert_key_map_valid(&heap);
        assert_eq!(heap.pop_with_key(), None);
        assert_key_map_valid(&heap);
    }
    #[test]
    fn valid_key_map_after_clear() {
        // `clear` must empty both the data vector and the key map.
        let mut heap: BinaryHeap<_, _> = BinaryHeap::new();
        assert_key_map_valid(&heap);
        heap.push(0, 0);
        assert_key_map_valid(&heap);
        heap.push(1, 10);
        heap.push(2, 15);
        heap.push(3, 5);
        heap.push(4, 8);
        assert_key_map_valid(&heap);
        heap.clear();
        assert_key_map_valid(&heap);
        assert_eq!(heap.len(), 0);
    }
}