use super::{IntoIter, Iter, IterMut, RefMut, tables::TriHashMapTables};
use crate::{
DefaultHashBuilder, TriHashItem,
errors::DuplicateItem,
internal::ValidationError,
support::{
alloc::{AllocWrapper, Allocator, Global, global_alloc},
borrow::DormantMutRef,
fmt_utils::StrDisplayAsDebug,
item_set::ItemSet,
map_hash::MapHash,
},
};
use alloc::{collections::BTreeSet, vec::Vec};
use core::{
fmt,
hash::{BuildHasher, Hash},
};
use equivalent::Equivalent;
use hashbrown::hash_table::{Entry, VacantEntry};
/// A hash map where each item carries three keys (`K1`, `K2`, `K3`), each of
/// which must be unique across the whole map (see `insert_unique`).
///
/// Lookups, removals, and mutation are available through any of the three
/// keys.
#[derive(Clone)]
pub struct TriHashMap<T, S = DefaultHashBuilder, A: Allocator = Global> {
// Dense item storage, indexed by slot; exposed to sibling modules
// (iterators, tables) via `pub(super)`.
pub(super) items: ItemSet<T, A>,
// Three hash tables mapping each key's hash to an index into `items`,
// plus the shared hasher state.
tables: TriHashMapTables<S, A>,
}
impl<T: TriHashItem, S: Default, A: Allocator + Default> Default
for TriHashMap<T, S, A>
{
fn default() -> Self {
Self {
items: ItemSet::with_capacity_in(0, A::default()),
tables: TriHashMapTables::default(),
}
}
}
#[cfg(feature = "default-hasher")]
impl<T: TriHashItem> TriHashMap<T> {
    /// Creates an empty map with the default hasher and global allocator.
    #[inline]
    pub fn new() -> Self {
        TriHashMap {
            tables: TriHashMapTables::default(),
            items: ItemSet::new(),
        }
    }

    /// Creates an empty map pre-sized to hold at least `capacity` items.
    pub fn with_capacity(capacity: usize) -> Self {
        let tables = TriHashMapTables::with_capacity_and_hasher_in(
            capacity,
            DefaultHashBuilder::default(),
            global_alloc(),
        );
        let items = ItemSet::with_capacity_in(capacity, global_alloc());
        TriHashMap { items, tables }
    }
}
impl<T: TriHashItem, S: BuildHasher> TriHashMap<T, S> {
pub const fn with_hasher(hasher: S) -> Self {
Self {
items: ItemSet::new(),
tables: TriHashMapTables::with_hasher(hasher),
}
}
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
Self {
items: ItemSet::with_capacity_in(capacity, global_alloc()),
tables: TriHashMapTables::with_capacity_and_hasher_in(
capacity,
hasher,
global_alloc(),
),
}
}
}
#[cfg(feature = "default-hasher")]
impl<T: TriHashItem, A: Clone + Allocator>
    TriHashMap<T, DefaultHashBuilder, A>
{
    /// Creates an empty map that allocates from `alloc`.
    pub fn new_in(alloc: A) -> Self {
        // Identical to a zero-capacity construction, so delegate.
        Self::with_capacity_in(0, alloc)
    }

    /// Creates a map allocating from `alloc`, pre-sized for at least
    /// `capacity` items.
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        let items = ItemSet::with_capacity_in(capacity, alloc.clone());
        let tables = TriHashMapTables::with_capacity_and_hasher_in(
            capacity,
            DefaultHashBuilder::default(),
            alloc,
        );
        TriHashMap { items, tables }
    }
}
impl<T: TriHashItem, S: Clone + BuildHasher, A: Clone + Allocator>
    TriHashMap<T, S, A>
{
    /// Creates an empty map hashing with `hasher` and allocating from
    /// `alloc`.
    pub fn with_hasher_in(hasher: S, alloc: A) -> Self {
        // `hasher` is owned, so pass it through directly; the previous
        // implementation cloned it and then dropped the original (a
        // redundant clone). Delegating also keeps the two constructors
        // consistent.
        Self::with_capacity_and_hasher_in(0, hasher, alloc)
    }

    /// Creates a map hashing with `hasher`, allocating from `alloc`, and
    /// pre-sized for at least `capacity` items.
    pub fn with_capacity_and_hasher_in(
        capacity: usize,
        hasher: S,
        alloc: A,
    ) -> Self {
        Self {
            // The allocator is shared by item storage and the key tables,
            // hence the single clone.
            items: ItemSet::with_capacity_in(capacity, alloc.clone()),
            tables: TriHashMapTables::with_capacity_and_hasher_in(
                capacity, hasher, alloc,
            ),
        }
    }
}
impl<T: TriHashItem, S: Clone + BuildHasher, A: Allocator> TriHashMap<T, S, A> {
/// Returns the map's hash builder (only needed by daft support code).
#[cfg(feature = "daft")]
#[inline]
pub(crate) fn hasher(&self) -> &S {
self.tables.hasher()
}
/// Returns a reference to the underlying allocator.
#[inline]
pub fn allocator(&self) -> &A {
self.items.allocator()
}
/// Returns the item-storage capacity.
pub fn capacity(&self) -> usize {
self.items.capacity()
}
/// Returns true if the map contains no items.
#[inline]
pub fn is_empty(&self) -> bool {
self.items.is_empty()
}
/// Returns the number of items in the map.
#[inline]
pub fn len(&self) -> usize {
self.items.len()
}
/// Removes every item, clearing the item storage and all three
/// key-index tables.
pub fn clear(&mut self) {
self.items.clear();
self.tables.k1_to_item.clear();
self.tables.k2_to_item.clear();
self.tables.k3_to_item.clear();
}
/// Reserves capacity for at least `additional` more items in the item
/// storage and in each of the three key tables.
pub fn reserve(&mut self, additional: usize) {
self.items.reserve(additional);
let items = &self.items;
let state = &self.tables.state;
// The closures recompute an entry's hash from its item's key so
// existing entries can be relocated when a table grows.
self.tables
.k1_to_item
.reserve(additional, |ix| state.hash_one(items[*ix].key1()));
self.tables
.k2_to_item
.reserve(additional, |ix| state.hash_one(items[*ix].key2()));
self.tables
.k3_to_item
.reserve(additional, |ix| state.hash_one(items[*ix].key3()));
}
/// Fallible version of `reserve`: returns an error instead of aborting
/// when allocation fails.
///
/// NOTE(review): if a later reservation fails, earlier ones are not
/// rolled back, so capacity may have grown partially.
pub fn try_reserve(
&mut self,
additional: usize,
) -> Result<(), crate::errors::TryReserveError> {
self.items
.try_reserve(additional)
.map_err(crate::errors::TryReserveError::from_hashbrown)?;
let items = &self.items;
let state = &self.tables.state;
self.tables
.k1_to_item
.try_reserve(additional, |ix| state.hash_one(items[*ix].key1()))
.map_err(crate::errors::TryReserveError::from_hashbrown)?;
self.tables
.k2_to_item
.try_reserve(additional, |ix| state.hash_one(items[*ix].key2()))
.map_err(crate::errors::TryReserveError::from_hashbrown)?;
self.tables
.k3_to_item
.try_reserve(additional, |ix| state.hash_one(items[*ix].key3()))
.map_err(crate::errors::TryReserveError::from_hashbrown)?;
Ok(())
}
/// Shrinks the item storage and all three key tables as much as
/// possible while keeping the current items.
pub fn shrink_to_fit(&mut self) {
self.items.shrink_to_fit();
let items = &self.items;
let state = &self.tables.state;
// The closures recompute entry hashes so entries can be relocated as
// the tables are rebuilt at the smaller size.
self.tables
.k1_to_item
.shrink_to_fit(|ix| state.hash_one(items[*ix].key1()));
self.tables
.k2_to_item
.shrink_to_fit(|ix| state.hash_one(items[*ix].key2()));
self.tables
.k3_to_item
.shrink_to_fit(|ix| state.hash_one(items[*ix].key3()));
}
/// Shrinks capacity toward `min_capacity` for the item storage and all
/// three key tables.
pub fn shrink_to(&mut self, min_capacity: usize) {
self.items.shrink_to(min_capacity);
let items = &self.items;
let state = &self.tables.state;
self.tables
.k1_to_item
.shrink_to(min_capacity, |ix| state.hash_one(items[*ix].key1()));
self.tables
.k2_to_item
.shrink_to(min_capacity, |ix| state.hash_one(items[*ix].key2()));
self.tables
.k3_to_item
.shrink_to(min_capacity, |ix| state.hash_one(items[*ix].key3()));
}
/// Iterates over shared references to the items.
#[inline]
pub fn iter(&self) -> Iter<'_, T> {
Iter::new(&self.items)
}
/// Iterates over the items mutably; yields `RefMut` wrappers rather
/// than plain `&mut T`.
#[inline]
pub fn iter_mut(&mut self) -> IterMut<'_, T, S, A> {
IterMut::new(&self.tables, &mut self.items)
}
/// Checks internal invariants: the item storage and tables validate,
/// and each item's three keys all resolve back to the item's own index.
/// Hidden from the public API surface; intended for tests.
#[doc(hidden)]
pub fn validate(
&self,
compactness: crate::internal::ValidateCompact,
) -> Result<(), ValidationError>
where
T: fmt::Debug,
{
self.items.validate(compactness)?;
self.tables.validate(self.len(), compactness)?;
for (&ix, item) in self.items.iter() {
let key1 = item.key1();
let key2 = item.key2();
let key3 = item.key3();
// Every key must be findable in its table...
let Some(ix1) = self.find1_index(&key1) else {
return Err(ValidationError::general(format!(
"item at index {ix} has no key1 index"
)));
};
let Some(ix2) = self.find2_index(&key2) else {
return Err(ValidationError::general(format!(
"item at index {ix} has no key2 index"
)));
};
let Some(ix3) = self.find3_index(&key3) else {
return Err(ValidationError::general(format!(
"item at index {ix} has no key3 index"
)));
};
// ...and all three must point back at this item's own slot.
if ix1 != ix || ix2 != ix || ix3 != ix {
return Err(ValidationError::general(format!(
"item at index {ix} has inconsistent indexes: {ix1}/{ix2}/{ix3}"
)));
}
}
Ok(())
}
/// Inserts `value`, first removing — and returning — any existing items
/// that collide with it on key1, key2, or key3 (up to three items).
#[doc(alias = "insert")]
pub fn insert_overwrite(&mut self, value: T) -> Vec<T> {
// Evict at most one colliding item per key.
let mut duplicates = Vec::new();
duplicates.extend(self.remove1(&value.key1()));
duplicates.extend(self.remove2(&value.key2()));
duplicates.extend(self.remove3(&value.key3()));
// All three keys were just freed, so unique insertion cannot fail.
if self.insert_unique(value).is_err() {
panic!("insert_unique failed after removing duplicates");
}
duplicates
}
/// Inserts `value` if none of its three keys is already present;
/// otherwise returns a `DuplicateItem` error referencing the
/// conflicting items and leaves the map unchanged.
pub fn insert_unique(
&mut self,
value: T,
) -> Result<(), DuplicateItem<T, &T>> {
let mut duplicates = BTreeSet::new();
let state = &self.tables.state;
// Probe all three tables up front: vacant slots are retained so the
// new index can be linked in without re-probing; occupied slots record
// the conflicting item's index in `duplicates`.
let (e1, e2, e3) = {
let k1 = value.key1();
let k2 = value.key2();
let k3 = value.key3();
let e1 = detect_dup_or_insert(
self.tables
.k1_to_item
.entry(state, k1, |index| self.items[index].key1()),
&mut duplicates,
);
let e2 = detect_dup_or_insert(
self.tables
.k2_to_item
.entry(state, k2, |index| self.items[index].key2()),
&mut duplicates,
);
let e3 = detect_dup_or_insert(
self.tables
.k3_to_item
.entry(state, k3, |index| self.items[index].key3()),
&mut duplicates,
);
(e1, e2, e3)
};
if !duplicates.is_empty() {
return Err(DuplicateItem::__internal_new(
value,
duplicates.iter().map(|ix| &self.items[*ix]).collect(),
));
}
// No conflicts: store the item and link its slot into all three
// tables. The unwraps hold because every entry was vacant above
// (duplicates would be non-empty otherwise).
let next_index = self.items.insert_at_next_index(value);
e1.unwrap().insert(next_index);
e2.unwrap().insert(next_index);
e3.unwrap().insert(next_index);
Ok(())
}
pub fn contains_key_unique<'a, Q1, Q2, Q3>(
&'a self,
key1: &Q1,
key2: &Q2,
key3: &Q3,
) -> bool
where
Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
Q3: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
self.get_unique(key1, key2, key3).is_some()
}
pub fn get_unique<'a, Q1, Q2, Q3>(
&'a self,
key1: &Q1,
key2: &Q2,
key3: &Q3,
) -> Option<&'a T>
where
Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
Q3: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
let index = self.find1_index(key1)?;
let item = &self.items[index];
if key2.equivalent(&item.key2()) && key3.equivalent(&item.key3()) {
Some(item)
} else {
None
}
}
/// Fetches a mutable `RefMut` handle to the item whose three keys are
/// exactly (`key1`, `key2`, `key3`), or `None` if no single item
/// matches all three.
pub fn get_mut_unique<'a, Q1, Q2, Q3>(
&'a mut self,
key1: &Q1,
key2: &Q2,
key3: &Q3,
) -> Option<RefMut<'a, T, S>>
where
Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
Q3: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
// The DormantMutRef dance performs the lookup through a shared
// borrow, then revives the mutable borrow for the returned handle
// without overlapping borrows.
let (dormant_map, index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let index = map.find1_index(key1)?;
let item = &map.items[index];
if !key2.equivalent(&item.key2()) || !key3.equivalent(&item.key3())
{
return None;
}
(dormant_map, index)
};
// SAFETY (per DormantMutRef's contract): the shared borrows above have
// ended, so reviving the mutable borrow is sound.
let awakened_map = unsafe { dormant_map.awaken() };
let item = &mut awakened_map.items[index];
let state = awakened_map.tables.state.clone();
let hashes = awakened_map.tables.make_hashes(&item);
Some(RefMut::new(state, hashes, item))
}
pub fn remove_unique<'a, Q1, Q2, Q3>(
&'a mut self,
key1: &Q1,
key2: &Q2,
key3: &Q3,
) -> Option<T>
where
Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
Q3: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
let (dormant_map, remove_index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let remove_index = map.find1_index(key1)?;
let item = &map.items[remove_index];
if !key2.equivalent(&item.key2()) && !key3.equivalent(&item.key3())
{
return None;
}
(dormant_map, remove_index)
};
let awakened_map = unsafe { dormant_map.awaken() };
awakened_map.remove_by_index(remove_index)
}
/// Returns true if an item with this `key1` exists.
pub fn contains_key1<'a, Q>(&'a self, key1: &Q) -> bool
where
Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
self.find1_index(key1).is_some()
}
/// Fetches the item with this `key1`, if any.
pub fn get1<'a, Q>(&'a self, key1: &Q) -> Option<&'a T>
where
Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
self.find1(key1)
}
/// Fetches a mutable `RefMut` handle to the item with this `key1`.
pub fn get1_mut<'a, Q>(&'a mut self, key1: &Q) -> Option<RefMut<'a, T, S>>
where
Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
// Shared-borrow lookup via a dormant mutable ref, then reborrow
// mutably for the returned handle.
let (dormant_map, index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let index = map.find1_index(key1)?;
(dormant_map, index)
};
// SAFETY (per DormantMutRef's contract): the shared borrow above has
// ended before the mutable borrow is revived.
let awakened_map = unsafe { dormant_map.awaken() };
let item = &mut awakened_map.items[index];
let state = awakened_map.tables.state.clone();
let hashes = awakened_map.tables.make_hashes(&item);
Some(RefMut::new(state, hashes, item))
}
/// Removes and returns the item with this `key1`, if any.
pub fn remove1<'a, Q>(&'a mut self, key1: &Q) -> Option<T>
where
Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
let (dormant_map, remove_index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let remove_index = map.find1_index(key1)?;
(dormant_map, remove_index)
};
// SAFETY (per DormantMutRef's contract): the shared borrow above has
// ended before the mutable borrow is revived.
let awakened_map = unsafe { dormant_map.awaken() };
awakened_map.remove_by_index(remove_index)
}
/// Returns true if an item with this `key2` exists.
pub fn contains_key2<'a, Q>(&'a self, key2: &Q) -> bool
where
Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
self.find2_index(key2).is_some()
}
/// Fetches the item with this `key2`, if any.
pub fn get2<'a, Q>(&'a self, key2: &Q) -> Option<&'a T>
where
Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
self.find2(key2)
}
/// Fetches a mutable `RefMut` handle to the item with this `key2`.
pub fn get2_mut<'a, Q>(&'a mut self, key2: &Q) -> Option<RefMut<'a, T, S>>
where
Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
// Shared-borrow lookup via a dormant mutable ref, then reborrow
// mutably for the returned handle.
let (dormant_map, index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let index = map.find2_index(key2)?;
(dormant_map, index)
};
// SAFETY (per DormantMutRef's contract): the shared borrow above has
// ended before the mutable borrow is revived.
let awakened_map = unsafe { dormant_map.awaken() };
let item = &mut awakened_map.items[index];
let state = awakened_map.tables.state.clone();
let hashes = awakened_map.tables.make_hashes(&item);
Some(RefMut::new(state, hashes, item))
}
/// Removes and returns the item with this `key2`, if any.
pub fn remove2<'a, Q>(&'a mut self, key2: &Q) -> Option<T>
where
Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
let (dormant_map, remove_index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let remove_index = map.find2_index(key2)?;
(dormant_map, remove_index)
};
// SAFETY (per DormantMutRef's contract): the shared borrow above has
// ended before the mutable borrow is revived.
let awakened_map = unsafe { dormant_map.awaken() };
awakened_map.remove_by_index(remove_index)
}
/// Returns true if an item with this `key3` exists.
pub fn contains_key3<'a, Q>(&'a self, key3: &Q) -> bool
where
Q: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
self.find3_index(key3).is_some()
}
/// Fetches the item with this `key3`, if any.
pub fn get3<'a, Q>(&'a self, key3: &Q) -> Option<&'a T>
where
Q: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
self.find3(key3)
}
/// Fetches a mutable `RefMut` handle to the item with this `key3`.
pub fn get3_mut<'a, Q>(&'a mut self, key3: &Q) -> Option<RefMut<'a, T, S>>
where
Q: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
// Shared-borrow lookup via a dormant mutable ref, then reborrow
// mutably for the returned handle.
let (dormant_map, index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let index = map.find3_index(key3)?;
(dormant_map, index)
};
// SAFETY (per DormantMutRef's contract): the shared borrow above has
// ended before the mutable borrow is revived.
let awakened_map = unsafe { dormant_map.awaken() };
let item = &mut awakened_map.items[index];
let state = awakened_map.tables.state.clone();
let hashes = awakened_map.tables.make_hashes(&item);
Some(RefMut::new(state, hashes, item))
}
/// Removes and returns the item with this `key3`, if any.
pub fn remove3<'a, Q>(&'a mut self, key3: &Q) -> Option<T>
where
Q: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
let (dormant_map, remove_index) = {
let (map, dormant_map) = DormantMutRef::new(self);
let remove_index = map.find3_index(key3)?;
(dormant_map, remove_index)
};
// SAFETY (per DormantMutRef's contract): the shared borrow above has
// ended before the mutable borrow is revived.
let awakened_map = unsafe { dormant_map.awaken() };
awakened_map.remove_by_index(remove_index)
}
/// Retains only the items for which `f` returns true. `f` receives a
/// `RefMut`, so it may also mutate items it decides to keep.
pub fn retain<'a, F>(&'a mut self, mut f: F)
where
F: FnMut(RefMut<'a, T, S>) -> bool,
{
let hash_state = self.tables.state.clone();
// Keep `items` reachable inside the k1 table's retain closure via a
// dormant mutable ref that is re-awakened each iteration.
let (_, mut dormant_items) = DormantMutRef::new(&mut self.items);
// Drive retention off the k1 table; matching k2/k3 entries for dropped
// items are removed by hash below.
self.tables.k1_to_item.retain(|index| {
let (item, dormant_items) = {
// SAFETY (per DormantMutRef's contract): borrows derived from
// this ref in earlier iterations have ended by the time each
// closure call reborrows.
let items = unsafe { dormant_items.reborrow() };
let (items, dormant_items) = DormantMutRef::new(items);
let item: &'a mut T = items
.get_mut(index)
.expect("all indexes are present in self.items");
(item, dormant_items)
};
// Compute all three key hashes up front, before `f` gets the item;
// presumably so the k2/k3 removals below still find the original
// entries — TODO confirm RefMut's key-mutation rules.
let (hashes, dormant_item) = {
let (item, dormant_item): (&'a mut T, _) =
DormantMutRef::new(item);
let key1 = T::key1(item);
let key2 = T::key2(item);
let key3 = T::key3(item);
let hash1 = hash_state.hash_one(key1);
let hash2 = hash_state.hash_one(key2);
let hash3 = hash_state.hash_one(key3);
(
[
MapHash::new(hash1),
MapHash::new(hash2),
MapHash::new(hash3),
],
dormant_item,
)
};
let hash2 = hashes[1].hash();
let hash3 = hashes[2].hash();
let retain = {
// SAFETY (per DormantMutRef's contract): the key borrows taken
// above have ended.
let item = unsafe { dormant_item.awaken() };
let ref_mut = RefMut::new(hash_state.clone(), hashes, item);
f(ref_mut)
};
if retain {
true
} else {
// Dropping this item: remove it from storage and from the k2
// and k3 tables. (The k1 entry is removed by returning false
// from this closure.)
// SAFETY (per DormantMutRef's contract): the item borrow has
// ended.
let items = unsafe { dormant_items.awaken() };
items.remove(index);
let k2_entry = self
.tables
.k2_to_item
.find_entry_by_hash(hash2, |map2_index| {
map2_index == index
});
let k3_entry = self
.tables
.k3_to_item
.find_entry_by_hash(hash3, |map3_index| {
map3_index == index
});
// Attempt both removals before panicking so a consistency
// failure leaves as little damage behind as possible.
let mut is_inconsistent = false;
if let Ok(k2_entry) = k2_entry {
k2_entry.remove();
} else {
is_inconsistent = true;
}
if let Ok(k3_entry) = k3_entry {
k3_entry.remove();
} else {
is_inconsistent = true;
}
if is_inconsistent {
panic!(
"inconsistency among k1_to_item, k2_to_item, k3_to_item"
);
}
false
}
});
}
fn find1<'a, Q>(&'a self, k: &Q) -> Option<&'a T>
where
Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
self.find1_index(k).map(|ix| &self.items[ix])
}
fn find1_index<'a, Q>(&'a self, k: &Q) -> Option<usize>
where
Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
self.tables
.k1_to_item
.find_index(&self.tables.state, k, |index| self.items[index].key1())
}
fn find2<'a, Q>(&'a self, k: &Q) -> Option<&'a T>
where
Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
self.find2_index(k).map(|ix| &self.items[ix])
}
fn find2_index<'a, Q>(&'a self, k: &Q) -> Option<usize>
where
Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
self.tables
.k2_to_item
.find_index(&self.tables.state, k, |index| self.items[index].key2())
}
fn find3<'a, Q>(&'a self, k: &Q) -> Option<&'a T>
where
Q: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
self.find3_index(k).map(|ix| &self.items[ix])
}
fn find3_index<'a, Q>(&'a self, k: &Q) -> Option<usize>
where
Q: Hash + Equivalent<T::K3<'a>> + ?Sized,
{
self.tables
.k3_to_item
.find_index(&self.tables.state, k, |index| self.items[index].key3())
}
/// Removes the item stored at `remove_index`, unlinking its entries
/// from all three key tables. Returns `None` if the slot is empty.
///
/// # Panics
///
/// Panics if the item exists but any key table has no entry for it —
/// an internal consistency violation.
pub(super) fn remove_by_index(&mut self, remove_index: usize) -> Option<T> {
let value = self.items.remove(remove_index)?;
let state = &self.tables.state;
// The item has already been taken out of `items`, so the lookup
// closures must answer key queries for `remove_index` from the removed
// value itself rather than indexing into `items`.
let Ok(item1) =
self.tables.k1_to_item.find_entry(state, &value.key1(), |index| {
if index == remove_index {
value.key1()
} else {
self.items[index].key1()
}
})
else {
panic!("remove_index {remove_index} not found in k1_to_item");
};
let Ok(item2) =
self.tables.k2_to_item.find_entry(state, &value.key2(), |index| {
if index == remove_index {
value.key2()
} else {
self.items[index].key2()
}
})
else {
panic!("remove_index {remove_index} not found in k2_to_item")
};
let Ok(item3) =
self.tables.k3_to_item.find_entry(state, &value.key3(), |index| {
if index == remove_index {
value.key3()
} else {
self.items[index].key3()
}
})
else {
panic!("remove_index {remove_index} not found in k3_to_item")
};
// All three entries were located; remove them and hand back the item.
item1.remove();
item2.remove();
item3.remove();
Some(value)
}
}
// Debug output renders each item keyed by a map of its three keys.
impl<'a, T, S, A: Allocator> fmt::Debug for TriHashMap<T, S, A>
where
T: TriHashItem + fmt::Debug,
T::K1<'a>: fmt::Debug,
T::K2<'a>: fmt::Debug,
T::K3<'a>: fmt::Debug,
T: 'a,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut map = f.debug_map();
for item in self.items.values() {
let key: KeyMap<'_, T> = KeyMap {
key1: item.key1(),
key2: item.key2(),
key3: item.key3(),
};
// SAFETY: the transmute only rebrands the lifetime parameter so
// the `T::K*<'a>: Debug` bounds above apply; the value is used
// and dropped within this loop iteration, while `item`'s borrow
// is still live. NOTE(review): this assumes nothing stores the
// rebranded KeyMap beyond the `map.entry` call — confirm if this
// code is ever restructured.
let key: KeyMap<'a, T> = unsafe {
core::mem::transmute::<KeyMap<'_, T>, KeyMap<'a, T>>(key)
};
map.entry(&key, item);
}
map.finish()
}
}
/// Borrowed bundle of one item's three keys; exists only to drive the
/// Debug output of `TriHashMap`.
struct KeyMap<'a, T: TriHashItem + 'a> {
key1: T::K1<'a>,
key2: T::K2<'a>,
key3: T::K3<'a>,
}
impl<'a, T: TriHashItem> fmt::Debug for KeyMap<'a, T>
where
    T::K1<'a>: fmt::Debug,
    T::K2<'a>: fmt::Debug,
    T::K3<'a>: fmt::Debug,
{
    /// Renders the three keys as `{k1: …, k2: …, k3: …}` with unquoted
    /// labels.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut entries = f.debug_map();
        entries.entry(&StrDisplayAsDebug("k1"), &self.key1);
        entries.entry(&StrDisplayAsDebug("k2"), &self.key2);
        entries.entry(&StrDisplayAsDebug("k3"), &self.key3);
        entries.finish()
    }
}
impl<T: TriHashItem + PartialEq, S: Clone + BuildHasher, A: Allocator> PartialEq
    for TriHashMap<T, S, A>
{
    /// Two maps are equal when they hold the same number of items and every
    /// item in `self` is matched, on all three keys, by an equal item in
    /// `other`; iteration order plays no part.
    fn eq(&self, other: &Self) -> bool {
        self.items.len() == other.items.len()
            && self.items.values().all(|item| {
                let found = (
                    other.find1_index(&item.key1()),
                    other.find2_index(&item.key2()),
                    other.find3_index(&item.key3()),
                );
                match found {
                    (Some(ix1), Some(ix2), Some(ix3)) => {
                        // All three keys must resolve to the same slot in
                        // `other`, and the items there must compare equal.
                        ix1 == ix2
                            && ix1 == ix3
                            && item == &other.items[ix1]
                    }
                    _ => false,
                }
            })
    }
}
// Eq is a marker on top of the PartialEq impl above; valid whenever T
// itself is Eq.
impl<T: TriHashItem + Eq, S: Clone + BuildHasher, A: Allocator> Eq
for TriHashMap<T, S, A>
{
}
impl<T: TriHashItem, S: Clone + BuildHasher, A: Allocator> Extend<T>
    for TriHashMap<T, S, A>
{
    /// Inserts every item yielded by `iter`, overwriting (and discarding)
    /// any existing items that collide on a key.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        let (lower, _) = iter.size_hint();
        // Reservation heuristic: trust the lower bound when the map is
        // empty, but only half of it otherwise — presumably because some
        // incoming items may replace existing ones rather than grow the map.
        let additional =
            if self.is_empty() { lower } else { lower.div_ceil(2) };
        self.reserve(additional);
        iter.for_each(|item| {
            self.insert_overwrite(item);
        });
    }
}
/// Splits a table `Entry` probe: an occupied slot records the conflicting
/// item's index in `duplicates` and yields `None`; a vacant slot is handed
/// back so the caller can link in the new index without re-probing.
fn detect_dup_or_insert<'a, A: Allocator>(
    item: Entry<'a, usize, AllocWrapper<A>>,
    duplicates: &mut BTreeSet<usize>,
) -> Option<VacantEntry<'a, usize, AllocWrapper<A>>> {
    match item {
        Entry::Occupied(occupied) => {
            duplicates.insert(*occupied.get());
            None
        }
        Entry::Vacant(vacant) => Some(vacant),
    }
}
// `&map` iterates over shared references to items.
impl<'a, T: TriHashItem, S: Clone + BuildHasher, A: Allocator> IntoIterator
for &'a TriHashMap<T, S, A>
{
type Item = &'a T;
type IntoIter = Iter<'a, T>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
// `&mut map` iterates mutably, yielding `RefMut` wrappers rather than
// plain `&mut T`.
impl<'a, T: TriHashItem, S: Clone + BuildHasher, A: Allocator> IntoIterator
for &'a mut TriHashMap<T, S, A>
{
type Item = RefMut<'a, T, S>;
type IntoIter = IterMut<'a, T, S, A>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
// Consuming iteration yields owned items; the key tables are dropped.
impl<T: TriHashItem, S: Clone + BuildHasher, A: Allocator> IntoIterator
for TriHashMap<T, S, A>
{
type Item = T;
type IntoIter = IntoIter<T, A>;
#[inline]
fn into_iter(self) -> Self::IntoIter {
IntoIter::new(self.items)
}
}
impl<T: TriHashItem, S: Default + Clone + BuildHasher, A: Default + Allocator>
    FromIterator<T> for TriHashMap<T, S, A>
{
    /// Builds a map from an iterator; on a key collision the later item
    /// wins and the earlier one is dropped.
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut map = Self::default();
        iter.into_iter().for_each(|item| {
            map.insert_overwrite(item);
        });
        map
    }
}