use super::{
Entry, IntoIter, Iter, IterMut, OccupiedEntry, RefMut, VacantEntry,
entry::OccupiedEntryRef,
entry_indexes::{DisjointKeys, EntryIndexes},
tables::BiHashMapTables,
};
use crate::{
BiHashItem, DefaultHashBuilder,
bi_hash_map::entry::OccupiedEntryMut,
errors::DuplicateItem,
internal::{ValidateCompact, ValidationError},
support::{
alloc::{AllocWrapper, Allocator, Global, global_alloc},
borrow::DormantMutRef,
fmt_utils::StrDisplayAsDebug,
item_set::ItemSet,
map_hash::MapHash,
},
};
use alloc::{collections::BTreeSet, vec::Vec};
use core::{
fmt,
hash::{BuildHasher, Hash},
};
use equivalent::Equivalent;
use hashbrown::hash_table;
/// A hash map in which each item `T` carries two keys (`key1` and `key2`),
/// with a lookup table maintained for each key.
#[derive(Clone)]
pub struct BiHashMap<T, S = DefaultHashBuilder, A: Allocator = Global> {
    /// Dense storage of items; both key tables refer to items by their
    /// index in this set.
    pub(super) items: ItemSet<T, A>,
    /// The hasher state plus the two hash tables (`k1_to_item`,
    /// `k2_to_item`) mapping key hashes to indexes into `items`.
    pub(super) tables: BiHashMapTables<S, A>,
}
impl<T: BiHashItem, S: Default, A: Allocator + Default> Default
    for BiHashMap<T, S, A>
{
    /// Returns an empty map built from the default hasher state and the
    /// default allocator.
    fn default() -> Self {
        let alloc = A::default();
        Self {
            tables: BiHashMapTables::default(),
            items: ItemSet::with_capacity_in(0, alloc),
        }
    }
}
#[cfg(feature = "default-hasher")]
impl<T: BiHashItem> BiHashMap<T> {
    /// Creates an empty map using the default hasher and the global
    /// allocator.
    #[inline]
    pub fn new() -> Self {
        Self { items: ItemSet::new(), tables: BiHashMapTables::default() }
    }

    /// Creates an empty map with room for at least `capacity` items,
    /// using the default hasher and the global allocator.
    pub fn with_capacity(capacity: usize) -> Self {
        let hasher = DefaultHashBuilder::default();
        let items = ItemSet::with_capacity_in(capacity, global_alloc());
        let tables = BiHashMapTables::with_capacity_and_hasher_in(
            capacity,
            hasher,
            global_alloc(),
        );
        Self { items, tables }
    }
}
impl<T: BiHashItem, S: BuildHasher> BiHashMap<T, S> {
    /// Creates an empty map that hashes both keys with `hasher`.
    pub const fn with_hasher(hasher: S) -> Self {
        let tables = BiHashMapTables::with_hasher(hasher);
        Self { items: ItemSet::new(), tables }
    }

    /// Creates an empty map with room for at least `capacity` items,
    /// hashing both keys with `hasher`.
    pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> Self {
        let items = ItemSet::with_capacity_in(capacity, global_alloc());
        let tables = BiHashMapTables::with_capacity_and_hasher_in(
            capacity,
            hasher,
            global_alloc(),
        );
        Self { items, tables }
    }
}
#[cfg(feature = "default-hasher")]
impl<T: BiHashItem, A: Clone + Allocator> BiHashMap<T, DefaultHashBuilder, A> {
    /// Creates an empty map in the given allocator, using the default
    /// hasher.
    pub fn new_in(alloc: A) -> Self {
        // Identical to a zero-capacity `with_capacity_in`.
        Self::with_capacity_in(0, alloc)
    }

    /// Creates an empty map with room for at least `capacity` items in
    /// the given allocator, using the default hasher.
    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
        let items = ItemSet::with_capacity_in(capacity, alloc.clone());
        let tables = BiHashMapTables::with_capacity_and_hasher_in(
            capacity,
            DefaultHashBuilder::default(),
            alloc,
        );
        Self { items, tables }
    }
}
impl<T: BiHashItem, S: Clone + BuildHasher, A: Clone + Allocator>
    BiHashMap<T, S, A>
{
    /// Creates an empty map in the given allocator, hashing both keys
    /// with `hasher`.
    pub fn with_hasher_in(hasher: S, alloc: A) -> Self {
        // Identical to a zero-capacity `with_capacity_and_hasher_in`.
        Self::with_capacity_and_hasher_in(0, hasher, alloc)
    }

    /// Creates an empty map with room for at least `capacity` items in
    /// the given allocator, hashing both keys with `hasher`.
    pub fn with_capacity_and_hasher_in(
        capacity: usize,
        hasher: S,
        alloc: A,
    ) -> Self {
        let items = ItemSet::with_capacity_in(capacity, alloc.clone());
        let tables = BiHashMapTables::with_capacity_and_hasher_in(
            capacity, hasher, alloc,
        );
        Self { items, tables }
    }
}
impl<T: BiHashItem, S: Clone + BuildHasher, A: Allocator> BiHashMap<T, S, A> {
// Crate-internal accessor for the hash builder; only compiled for the
// `daft` integration.
#[cfg(feature = "daft")]
#[inline]
pub(crate) fn hasher(&self) -> &S {
    self.tables.hasher()
}
/// Returns a reference to the allocator backing the item storage.
#[inline]
pub fn allocator(&self) -> &A {
    self.items.allocator()
}
/// Returns the capacity of the item storage (the hash tables'
/// capacities may differ).
pub fn capacity(&self) -> usize {
    self.items.capacity()
}
/// Returns `true` if the map contains no items.
#[inline]
pub fn is_empty(&self) -> bool {
    self.items.is_empty()
}
/// Returns the number of items in the map.
#[inline]
pub fn len(&self) -> usize {
    self.items.len()
}
/// Removes every item from the map.
///
/// Clears the item storage and both key tables together so the three
/// structures stay consistent.
pub fn clear(&mut self) {
    self.items.clear();
    self.tables.k1_to_item.clear();
    self.tables.k2_to_item.clear();
}
/// Reserves capacity for at least `additional` more items in the item
/// storage and both key tables.
pub fn reserve(&mut self, additional: usize) {
    self.items.reserve(additional);
    // Take shared borrows of disjoint fields: the table `reserve`
    // calls need closures that can re-hash existing entries by looking
    // up their item in `items`.
    let items = &self.items;
    let state = &self.tables.state;
    self.tables
        .k1_to_item
        .reserve(additional, |ix| state.hash_one(items[*ix].key1()));
    self.tables
        .k2_to_item
        .reserve(additional, |ix| state.hash_one(items[*ix].key2()));
}
/// Fallibly reserves capacity for at least `additional` more items.
///
/// # Errors
///
/// Returns an error if the item storage or either key table fails to
/// allocate. NOTE(review): an early return may leave the three
/// structures with uneven capacities (no data is lost).
pub fn try_reserve(
    &mut self,
    additional: usize,
) -> Result<(), crate::errors::TryReserveError> {
    self.items
        .try_reserve(additional)
        .map_err(crate::errors::TryReserveError::from_hashbrown)?;
    // Shared borrows of disjoint fields for the re-hash closures.
    let items = &self.items;
    let state = &self.tables.state;
    self.tables
        .k1_to_item
        .try_reserve(additional, |ix| state.hash_one(items[*ix].key1()))
        .map_err(crate::errors::TryReserveError::from_hashbrown)?;
    self.tables
        .k2_to_item
        .try_reserve(additional, |ix| state.hash_one(items[*ix].key2()))
        .map_err(crate::errors::TryReserveError::from_hashbrown)?;
    Ok(())
}
/// Shrinks the item storage and both key tables as much as possible.
pub fn shrink_to_fit(&mut self) {
    self.items.shrink_to_fit();
    // Shared borrows for the re-hash closures, as in `reserve`.
    let items = &self.items;
    let state = &self.tables.state;
    self.tables
        .k1_to_item
        .shrink_to_fit(|ix| state.hash_one(items[*ix].key1()));
    self.tables
        .k2_to_item
        .shrink_to_fit(|ix| state.hash_one(items[*ix].key2()));
}
/// Shrinks the item storage and both key tables, keeping capacity for
/// at least `min_capacity` items.
pub fn shrink_to(&mut self, min_capacity: usize) {
    self.items.shrink_to(min_capacity);
    let items = &self.items;
    let state = &self.tables.state;
    self.tables
        .k1_to_item
        .shrink_to(min_capacity, |ix| state.hash_one(items[*ix].key1()));
    self.tables
        .k2_to_item
        .shrink_to(min_capacity, |ix| state.hash_one(items[*ix].key2()));
}
/// Returns an iterator over shared references to the items.
#[inline]
pub fn iter(&self) -> Iter<'_, T> {
    Iter::new(&self.items)
}
/// Returns an iterator over mutable handles (`RefMut`) to the items;
/// the tables are passed along so key changes can be tracked.
#[inline]
pub fn iter_mut(&mut self) -> IterMut<'_, T, S, A> {
    IterMut::new(&self.tables, &mut self.items)
}
/// Checks internal consistency: item storage, table shape, and that
/// every item's two keys resolve back to the item's own index.
#[doc(hidden)]
pub fn validate(
    &self,
    compactness: ValidateCompact,
) -> Result<(), ValidationError>
where
    T: fmt::Debug,
{
    self.items.validate(compactness)?;
    self.tables.validate(self.len(), compactness)?;
    for (&ix, item) in self.items.iter() {
        // key1 is checked first, then key2, then agreement of both.
        let ix1 = self.find1_index(&item.key1()).ok_or_else(|| {
            ValidationError::general(format!(
                "item at index {ix} has no key1 index"
            ))
        })?;
        let ix2 = self.find2_index(&item.key2()).ok_or_else(|| {
            ValidationError::general(format!(
                "item at index {ix} has no key2 index"
            ))
        })?;
        if ix1 != ix || ix2 != ix {
            return Err(ValidationError::general(format!(
                "item at index {ix} has inconsistent indexes: {ix1}/{ix2}"
            )));
        }
    }
    Ok(())
}
/// Inserts `value`, evicting any existing items that share its `key1`
/// or `key2`; the evicted items (zero, one, or two) are returned.
#[doc(alias = "insert")]
pub fn insert_overwrite(&mut self, value: T) -> Vec<T> {
    // Evict the current owner of each key first, so the unique insert
    // below cannot collide.
    let evicted_by_k1 = self.remove1(&value.key1());
    let evicted_by_k2 = self.remove2(&value.key2());
    let duplicates: Vec<T> =
        evicted_by_k1.into_iter().chain(evicted_by_k2).collect();
    if self.insert_unique(value).is_err() {
        panic!("insert_unique failed after removing duplicates");
    }
    duplicates
}
/// Inserts `value` only if neither of its keys is already present.
///
/// # Errors
///
/// Returns a [`DuplicateItem`] carrying `value` together with
/// references to the conflicting item(s) when either key collides.
pub fn insert_unique(
    &mut self,
    value: T,
) -> Result<(), DuplicateItem<T, &T>> {
    // The impl returns the new index; public callers don't need it.
    self.insert_unique_impl(value).map(|_index| ())
}
/// Returns `true` if a single item matches *both* `key1` and `key2`.
pub fn contains_key_unique<'a, Q1, Q2>(
    &'a self,
    key1: &Q1,
    key2: &Q2,
) -> bool
where
    Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
    Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    self.get_unique(key1, key2).is_some()
}
/// Looks up the item by `key1`, then returns it only if its `key2`
/// also matches; `None` when no single item satisfies both keys.
pub fn get_unique<'a, Q1, Q2>(
    &'a self,
    key1: &Q1,
    key2: &Q2,
) -> Option<&'a T>
where
    Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
    Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    let item = &self.items[self.find1_index(key1)?];
    key2.equivalent(&item.key2()).then_some(item)
}
/// Returns a mutable handle to the item matching *both* keys, or
/// `None` if no single item matches both.
pub fn get_mut_unique<'a, Q1, Q2>(
    &'a mut self,
    key1: &Q1,
    key2: &Q2,
) -> Option<RefMut<'a, T, S>>
where
    Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
    Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    let (dormant_map, index) = {
        // Do the lookup through a temporary borrow, keeping a dormant
        // mutable reference so `self` can be re-borrowed afterwards.
        let (map, dormant_map) = DormantMutRef::new(self);
        let index = map.find1_index(key1)?;
        if !key2.equivalent(&map.items[index].key2()) {
            return None;
        }
        (dormant_map, index)
    };
    // SAFETY: every reference derived from `map` ended with the block
    // above, so the dormant mutable borrow can be revived.
    let awakened_map = unsafe { dormant_map.awaken() };
    let item = &mut awakened_map.items[index];
    let state = awakened_map.tables.state.clone();
    // Both key hashes are captured before handing out mutable access
    // (NOTE(review): presumably so `RefMut` can detect key changes —
    // confirm in `RefMut`'s definition).
    let hashes =
        awakened_map.tables.make_hashes::<T>(&item.key1(), &item.key2());
    Some(RefMut::new(state, hashes, item))
}
/// Removes and returns the item matching *both* keys, or `None` if no
/// single item matches both.
pub fn remove_unique<'a, Q1, Q2>(
    &'a mut self,
    key1: &Q1,
    key2: &Q2,
) -> Option<T>
where
    Q1: Hash + Equivalent<T::K1<'a>> + ?Sized,
    Q2: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    let (dormant_map, remove_index) = {
        // Lookup via a temporary borrow; the dormant ref lets us
        // re-borrow `self` mutably for the removal.
        let (map, dormant_map) = DormantMutRef::new(self);
        let remove_index = map.find1_index(key1)?;
        if !key2.equivalent(&map.items[remove_index].key2()) {
            return None;
        }
        (dormant_map, remove_index)
    };
    // SAFETY: all borrows derived from `map` ended with the block above.
    let awakened_map = unsafe { dormant_map.awaken() };
    awakened_map.remove_by_index(remove_index)
}
/// Returns `true` if an item with the given `key1` exists.
pub fn contains_key1<'a, Q>(&'a self, key1: &Q) -> bool
where
    Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
    self.find1_index(key1).is_some()
}
/// Returns the item with the given `key1`, if any.
pub fn get1<'a, Q>(&'a self, key1: &Q) -> Option<&'a T>
where
    Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
    self.find1(key1)
}
/// Returns a mutable handle to the item with the given `key1`, if any.
pub fn get1_mut<'a, Q>(&'a mut self, key1: &Q) -> Option<RefMut<'a, T, S>>
where
    Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
    let (dormant_map, index) = {
        // Lookup via a temporary borrow; the dormant ref lets us
        // reclaim `&mut self` once the lookup borrows end.
        let (map, dormant_map) = DormantMutRef::new(self);
        let index = map.find1_index(key1)?;
        (dormant_map, index)
    };
    // SAFETY: all borrows derived from `map` ended with the block above.
    let awakened_map = unsafe { dormant_map.awaken() };
    let item = &mut awakened_map.items[index];
    let state = awakened_map.tables.state.clone();
    // Capture both key hashes before handing out mutable access
    // (NOTE(review): presumably for key-change tracking by `RefMut`).
    let hashes =
        awakened_map.tables.make_hashes::<T>(&item.key1(), &item.key2());
    Some(RefMut::new(state, hashes, item))
}
/// Removes and returns the item with the given `key1`, if any.
pub fn remove1<'a, Q>(&'a mut self, key1: &Q) -> Option<T>
where
    Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
    let (dormant_map, remove_index) = {
        // Lookup via a temporary borrow, then re-borrow mutably below.
        let (map, dormant_map) = DormantMutRef::new(self);
        let remove_index = map.find1_index(key1)?;
        (dormant_map, remove_index)
    };
    // SAFETY: all borrows derived from `map` ended with the block above.
    let awakened_map = unsafe { dormant_map.awaken() };
    awakened_map.remove_by_index(remove_index)
}
/// Returns `true` if an item with the given `key2` exists.
pub fn contains_key2<'a, Q>(&'a self, key2: &Q) -> bool
where
    Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    self.find2_index(key2).is_some()
}
/// Returns the item with the given `key2`, if any.
pub fn get2<'a, Q>(&'a self, key2: &Q) -> Option<&'a T>
where
    Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    self.find2(key2)
}
/// Returns a mutable handle to the item with the given `key2`, if any.
pub fn get2_mut<'a, Q>(&'a mut self, key2: &Q) -> Option<RefMut<'a, T, S>>
where
    Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    let (dormant_map, index) = {
        // Mirror of `get1_mut`, keyed on `key2`.
        let (map, dormant_map) = DormantMutRef::new(self);
        let index = map.find2_index(key2)?;
        (dormant_map, index)
    };
    // SAFETY: all borrows derived from `map` ended with the block above.
    let awakened_map = unsafe { dormant_map.awaken() };
    let item = &mut awakened_map.items[index];
    let state = awakened_map.tables.state.clone();
    let hashes =
        awakened_map.tables.make_hashes::<T>(&item.key1(), &item.key2());
    Some(RefMut::new(state, hashes, item))
}
/// Removes and returns the item with the given `key2`, if any.
pub fn remove2<'a, Q>(&'a mut self, key2: &Q) -> Option<T>
where
    Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    let (dormant_map, remove_index) = {
        // Mirror of `remove1`, keyed on `key2`.
        let (map, dormant_map) = DormantMutRef::new(self);
        let remove_index = map.find2_index(key2)?;
        (dormant_map, remove_index)
    };
    // SAFETY: all borrows derived from `map` ended with the block above.
    let awakened_map = unsafe { dormant_map.awaken() };
    awakened_map.remove_by_index(remove_index)
}
/// Returns the entry for the given key pair, for in-place inspection
/// or insertion.
///
/// The entry is `Vacant` only when *neither* key is present. It is
/// `Occupied` with `Unique` indexes when both keys resolve to the same
/// item, and `NonUnique` when only one key matches or the two keys
/// match different items.
pub fn entry<'a>(
    &'a mut self,
    key1: T::K1<'_>,
    key2: T::K2<'_>,
) -> Entry<'a, T, S, A> {
    // Look the keys up through a temporary borrow; `dormant_map` is
    // handed to the returned entry so it can later mutate the map.
    let (map, dormant_map) = DormantMutRef::new(self);
    let key1 = T::upcast_key1(key1);
    let key2 = T::upcast_key2(key2);
    let (index1, index2) = {
        let index1: Option<usize> = map.tables.k1_to_item.find_index(
            &map.tables.state,
            &key1,
            |index| map.items[index].key1(),
        );
        let index2: Option<usize> = map.tables.k2_to_item.find_index(
            &map.tables.state,
            &key2,
            |index| map.items[index].key2(),
        );
        (index1, index2)
    };
    match (index1, index2) {
        // Both keys point at the same item: a unique occupied entry.
        (Some(index1), Some(index2)) if index1 == index2 => {
            // `key1` is explicitly dropped here; `key2` simply falls
            // out of scope (NOTE(review): asymmetric — confirm this is
            // intentional).
            drop(key1);
            // SAFETY: the lookup borrows of `map` have ended;
            // `dormant_map` is the only outstanding reference to the
            // map (TODO: confirm against `OccupiedEntry::new`'s
            // contract).
            Entry::Occupied(
                unsafe {
                    OccupiedEntry::new(
                        dormant_map,
                        EntryIndexes::Unique(index1),
                    )
                },
            )
        }
        // Neither key present: vacant entry carrying the precomputed
        // hashes for a later insert.
        (None, None) => {
            let hashes = map.tables.make_hashes::<T>(&key1, &key2);
            // SAFETY: as above — lookup borrows have ended.
            Entry::Vacant(
                unsafe { VacantEntry::new(dormant_map, hashes) },
            )
        }
        // One key matched, or the keys matched two different items.
        (index1, index2) => Entry::Occupied(
            // SAFETY: as above — lookup borrows have ended.
            unsafe {
                OccupiedEntry::new(
                    dormant_map,
                    EntryIndexes::NonUnique { index1, index2 },
                )
            },
        ),
    }
}
/// Retains only the items for which `f` returns `true`.
///
/// `f` receives a mutable handle for each item; rejected items are
/// removed from the item storage and from both key tables. The walk is
/// driven by `k1_to_item`'s own `retain`, so removal from that table
/// happens by returning `false` from the closure.
pub fn retain<'a, F>(&'a mut self, mut f: F)
where
    F: FnMut(RefMut<'a, T, S>) -> bool,
{
    let hash_state = self.tables.state.clone();
    // A dormant mutable ref over `self.items` lets the closure below
    // re-borrow the item storage once per callback.
    let (_, mut dormant_items) = DormantMutRef::new(&mut self.items);
    self.tables.k1_to_item.retain(|index| {
        let (item, dormant_items) = {
            // SAFETY: any reborrow from a previous closure invocation
            // ended when that invocation returned.
            let items = unsafe { dormant_items.reborrow() };
            let (items, dormant_items) = DormantMutRef::new(items);
            let item: &'a mut T = items
                .get_mut(index)
                .expect("all indexes are present in self.items");
            (item, dormant_items)
        };
        let (hashes, dormant_item) = {
            // Hash both keys up front; the k2 hash is needed below to
            // locate the matching `k2_to_item` entry if we remove.
            let (item, dormant_item): (&'a mut T, _) =
                DormantMutRef::new(item);
            let key1 = T::key1(item);
            let key2 = T::key2(item);
            let hash1 = hash_state.hash_one(key1);
            let hash2 = hash_state.hash_one(key2);
            ([MapHash::new(hash1), MapHash::new(hash2)], dormant_item)
        };
        let hash2 = hashes[1].hash();
        let retain = {
            // SAFETY: the key borrows taken above have been dropped.
            let item = unsafe { dormant_item.awaken() };
            let ref_mut = RefMut::new(hash_state.clone(), hashes, item);
            f(ref_mut)
        };
        if retain {
            true
        } else {
            // Remove the item itself...
            // SAFETY: the `RefMut` handed to `f` has been dropped.
            let items = unsafe { dormant_items.awaken() };
            items.remove(index);
            // ...and its `k2_to_item` entry, matched by the
            // precomputed hash plus index equality.
            let k2_entry = self
                .tables
                .k2_to_item
                .find_entry_by_hash(hash2, |map2_index| {
                    map2_index == index
                });
            match k2_entry {
                Ok(entry) => {
                    entry.remove();
                }
                Err(_) => {
                    panic!(
                        "inconsistency between k1_to_item and k2_to_item"
                    );
                }
            }
            // Returning false drops the entry from `k1_to_item` too.
            false
        }
    });
}
/// Shared-reference lookup by `key1`.
fn find1<'a, Q>(&'a self, k: &Q) -> Option<&'a T>
where
    Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
    self.find1_index(k).map(|ix| &self.items[ix])
}
/// Index lookup by `key1`: hashes `k` with the table state and probes
/// `k1_to_item`, comparing against each candidate item's `key1`.
fn find1_index<'a, Q>(&'a self, k: &Q) -> Option<usize>
where
    Q: Hash + Equivalent<T::K1<'a>> + ?Sized,
{
    self.tables
        .k1_to_item
        .find_index(&self.tables.state, k, |index| self.items[index].key1())
}
/// Shared-reference lookup by `key2`.
fn find2<'a, Q>(&'a self, k: &Q) -> Option<&'a T>
where
    Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    self.find2_index(k).map(|ix| &self.items[ix])
}
/// Index lookup by `key2`; mirror of `find1_index`.
fn find2_index<'a, Q>(&'a self, k: &Q) -> Option<usize>
where
    Q: Hash + Equivalent<T::K2<'a>> + ?Sized,
{
    self.tables
        .k2_to_item
        .find_index(&self.tables.state, k, |index| self.items[index].key2())
}
/// Resolves entry indexes to shared references into the item storage.
///
/// # Panics
///
/// Panics if any index in `indexes` is not present in `items` — entry
/// indexes are only produced from live lookups.
pub(super) fn get_by_entry_index(
    &self,
    indexes: EntryIndexes,
) -> OccupiedEntryRef<'_, T> {
    match indexes {
        EntryIndexes::Unique(ix) => {
            let item = self.items.get(ix).expect("index is valid");
            OccupiedEntryRef::Unique(item)
        }
        EntryIndexes::NonUnique { index1, index2 } => {
            OccupiedEntryRef::NonUnique {
                by_key1: index1.map(|ix| {
                    self.items.get(ix).expect("key1 index is valid")
                }),
                by_key2: index2.map(|ix| {
                    self.items.get(ix).expect("key2 index is valid")
                }),
            }
        }
    }
}
/// Resolves entry indexes to mutable handles into the item storage.
///
/// `disjoint_keys()` classifies the index pair so that we never take
/// two mutable borrows of the same item: only the `Key12` case (two
/// distinct indexes) needs `get_disjoint_mut`.
pub(super) fn get_by_entry_index_mut(
    &mut self,
    indexes: EntryIndexes,
) -> OccupiedEntryMut<'_, T, S> {
    match indexes.disjoint_keys() {
        // Both keys resolve to one item.
        DisjointKeys::Unique(index) => {
            let item = self.items.get_mut(index).expect("index is valid");
            let state = self.tables.state.clone();
            let hashes =
                self.tables.make_hashes::<T>(&item.key1(), &item.key2());
            OccupiedEntryMut::Unique(RefMut::new(state, hashes, item))
        }
        // Only key1 matched an item.
        DisjointKeys::Key1(index1) => {
            let item =
                self.items.get_mut(index1).expect("key1 index is valid");
            let state = self.tables.state.clone();
            let hashes =
                self.tables.make_hashes::<T>(&item.key1(), &item.key2());
            OccupiedEntryMut::NonUnique {
                by_key1: Some(RefMut::new(state, hashes, item)),
                by_key2: None,
            }
        }
        // Only key2 matched an item.
        DisjointKeys::Key2(index2) => {
            let item =
                self.items.get_mut(index2).expect("key2 index is valid");
            let state = self.tables.state.clone();
            let hashes =
                self.tables.make_hashes::<T>(&item.key1(), &item.key2());
            OccupiedEntryMut::NonUnique {
                by_key1: None,
                by_key2: Some(RefMut::new(state, hashes, item)),
            }
        }
        // The keys matched two different items: borrow both at once.
        DisjointKeys::Key12(indexes) => {
            let state = self.tables.state.clone();
            let mut items = self.items.get_disjoint_mut(indexes);
            let item1 = items[0].take().expect("key1 index is valid");
            let item2 = items[1].take().expect("key2 index is valid");
            let hashes1 =
                self.tables.make_hashes::<T>(&item1.key1(), &item1.key2());
            let hashes2 =
                self.tables.make_hashes::<T>(&item2.key1(), &item2.key2());
            OccupiedEntryMut::NonUnique {
                by_key1: Some(RefMut::new(state.clone(), hashes1, item1)),
                by_key2: Some(RefMut::new(state, hashes2, item2)),
            }
        }
    }
}
/// Returns a mutable handle to the item stored at `index`, or `None`
/// if nothing is stored there.
pub(super) fn get_by_index_mut(
    &mut self,
    index: usize,
) -> Option<RefMut<'_, T, S>> {
    // First borrow checks presence and supplies the keys for hashing.
    let borrowed = self.items.get_mut(index)?;
    let state = self.tables.state.clone();
    let hashes =
        self.tables.make_hashes::<T>(&borrowed.key1(), &borrowed.key2());
    // Re-index to obtain a fresh mutable borrow that is not entangled
    // with the hash computation above.
    let item = &mut self.items[index];
    Some(RefMut::new(state, hashes, item))
}
/// Inserts `value`, failing if either of its keys collides with an
/// existing item.
///
/// On success, returns the storage index of the new item. On failure,
/// returns `value` together with references to every item it collided
/// with (deduplicated via a `BTreeSet` of indexes, since both keys may
/// hit the same item).
pub(super) fn insert_unique_impl(
    &mut self,
    value: T,
) -> Result<usize, DuplicateItem<T, &T>> {
    let mut duplicates = BTreeSet::new();
    let state = &self.tables.state;
    // Probe both tables first: each probe either hands back a vacant
    // slot ready for insertion, or records the colliding item's index.
    let (e1, e2) = {
        let k1 = value.key1();
        let k2 = value.key2();
        let e1 = detect_dup_or_insert(
            self.tables
                .k1_to_item
                .entry(state, k1, |index| self.items[index].key1()),
            &mut duplicates,
        );
        let e2 = detect_dup_or_insert(
            self.tables
                .k2_to_item
                .entry(state, k2, |index| self.items[index].key2()),
            &mut duplicates,
        );
        (e1, e2)
    };
    if !duplicates.is_empty() {
        return Err(DuplicateItem::__internal_new(
            value,
            duplicates.iter().map(|ix| &self.items[*ix]).collect(),
        ));
    }
    let next_index = self.items.insert_at_next_index(value);
    // Invariant: `duplicates` is empty, so both probes above returned
    // vacant slots. State the invariant rather than unwrapping blindly.
    e1.expect("no duplicates found, so k1 entry must be vacant")
        .insert(next_index);
    e2.expect("no duplicates found, so k2 entry must be vacant")
        .insert(next_index);
    Ok(next_index)
}
/// Removes the item(s) referenced by `indexes` and returns them.
///
/// A `Unique` entry removes exactly one item; a `NonUnique` entry
/// removes up to two, key1's item first, then key2's.
pub(super) fn remove_by_entry_index(
    &mut self,
    indexes: EntryIndexes,
) -> Vec<T> {
    match indexes {
        EntryIndexes::Unique(index) => {
            vec![self.remove_by_index(index).expect("index is valid")]
        }
        EntryIndexes::NonUnique { index1, index2 } => {
            let mut removed = Vec::new();
            let slots =
                [(index1, "index1 is valid"), (index2, "index2 is valid")];
            for (slot, msg) in slots {
                if let Some(ix) = slot {
                    removed.push(self.remove_by_index(ix).expect(msg));
                }
            }
            removed
        }
    }
}
/// Removes and returns the item stored at `remove_index`, deleting its
/// entries from both key tables; returns `None` if nothing is stored
/// there.
///
/// # Panics
///
/// Panics if the item existed but either key table has no entry for it
/// (an internal consistency violation).
pub(super) fn remove_by_index(&mut self, remove_index: usize) -> Option<T> {
    let value = self.items.remove(remove_index)?;
    let state = &self.tables.state;
    // The item is already gone from `items`, so the key-lookup closure
    // must answer with the removed item's own key when asked about
    // `remove_index` itself.
    let Ok(item1) =
        self.tables.k1_to_item.find_entry(state, &value.key1(), |index| {
            if index == remove_index {
                value.key1()
            } else {
                self.items[index].key1()
            }
        })
    else {
        panic!("remove_index {remove_index} not found in k1_to_item");
    };
    let Ok(item2) =
        self.tables.k2_to_item.find_entry(state, &value.key2(), |index| {
            if index == remove_index {
                value.key2()
            } else {
                self.items[index].key2()
            }
        })
    else {
        panic!("remove_index {remove_index} not found in k2_to_item")
    };
    item1.remove();
    item2.remove();
    Some(value)
}
/// Replaces the item(s) referenced by `indexes` with `value`,
/// returning the new item's storage index and the displaced item(s).
///
/// # Panics
///
/// Panics if `value`'s keys do not match the corresponding keys of the
/// item(s) being replaced. NOTE(review): callers are expected to pass
/// a `value` whose keys match the entry — confirm at call sites.
pub(super) fn replace_at_indexes(
    &mut self,
    indexes: EntryIndexes,
    value: T,
) -> (usize, Vec<T>) {
    match indexes {
        // One item owns both keys: verify both keys, then swap in place
        // so table entries stay valid.
        EntryIndexes::Unique(index) => {
            let old_item = &self.items[index];
            if old_item.key1() != value.key1() {
                panic!("key1 mismatch");
            }
            if old_item.key2() != value.key2() {
                panic!("key2 mismatch");
            }
            let old_item = self.items.replace(index, value);
            (index, vec![old_item])
        }
        // The keys live on different items (or only one matched):
        // remove each matched item after checking its key, then insert
        // `value` fresh.
        EntryIndexes::NonUnique { index1, index2 } => {
            let mut old_items = Vec::new();
            if let Some(index1) = index1 {
                let old_item = &self.items[index1];
                if old_item.key1() != value.key1() {
                    panic!("key1 mismatch");
                }
                old_items.push(self.remove_by_index(index1).unwrap());
            }
            if let Some(index2) = index2 {
                let old_item = &self.items[index2];
                if old_item.key2() != value.key2() {
                    panic!("key2 mismatch");
                }
                old_items.push(self.remove_by_index(index2).unwrap());
            }
            // Both conflicting items are gone, so this cannot collide.
            let Ok(next_index) = self.insert_unique_impl(value) else {
                unreachable!(
                    "insert_unique cannot fail after removing duplicates"
                );
            };
            (next_index, old_items)
        }
    }
}
}
impl<'a, T, S, A> fmt::Debug for BiHashMap<T, S, A>
where
    T: BiHashItem + fmt::Debug,
    T::K1<'a>: fmt::Debug,
    T::K2<'a>: fmt::Debug,
    T: 'a,
    A: Allocator,
{
    /// Formats the map as a debug map whose keys are `{k1: .., k2: ..}`
    /// pairs and whose values are the items.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut map = f.debug_map();
        for item in self.items.values() {
            let key: KeyMap<'_, T> =
                KeyMap { key1: item.key1(), key2: item.key2() };
            // NOTE(review): the transmute only rewrites the lifetime
            // parameter so that the impl's `T::K1<'a>: Debug` bound
            // applies; the transmuted value is consumed within this
            // loop iteration. Confirm this lifetime extension is sound
            // for every `BiHashItem` implementation.
            let key: KeyMap<'a, T> = unsafe {
                core::mem::transmute::<KeyMap<'_, T>, KeyMap<'a, T>>(key)
            };
            map.entry(&key as &dyn fmt::Debug, item);
        }
        map.finish()
    }
}
/// Debug-formatting helper: bundles an item's two keys so they can be
/// printed together as the "key" of a debug-map entry.
struct KeyMap<'a, T: BiHashItem + 'a> {
    key1: T::K1<'a>,
    key2: T::K2<'a>,
}
impl<'a, T: BiHashItem + 'a> fmt::Debug for KeyMap<'a, T>
where
    T::K1<'a>: fmt::Debug,
    T::K2<'a>: fmt::Debug,
{
    /// Formats the pair as `{"k1": .., "k2": ..}`, with the labels
    /// printed unquoted via `StrDisplayAsDebug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_map();
        builder.entry(&StrDisplayAsDebug("k1"), &self.key1);
        builder.entry(&StrDisplayAsDebug("k2"), &self.key2);
        builder.finish()
    }
}
impl<T: BiHashItem + PartialEq, S: Clone + BuildHasher, A: Allocator> PartialEq
    for BiHashMap<T, S, A>
{
    /// Two maps are equal when they contain equal items: the counts
    /// match, and every item of `self` resolves — by both of its keys —
    /// to a single, equal item in `other`.
    fn eq(&self, other: &Self) -> bool {
        self.items.len() == other.items.len()
            && self.items.values().all(|item| {
                let found1 = other.find1_index(&item.key1());
                let found2 = other.find2_index(&item.key2());
                match (found1, found2) {
                    // Both keys must land on the same item in `other`,
                    // and that item must compare equal.
                    (Some(ix1), Some(ix2)) => {
                        ix1 == ix2 && *item == other.items[ix1]
                    }
                    _ => false,
                }
            })
    }
}
// Marker impl: map equality (defined by `PartialEq` above) is a full
// equivalence relation whenever the item type is `Eq`.
impl<T: BiHashItem + Eq, S: Clone + BuildHasher, A: Allocator> Eq
    for BiHashMap<T, S, A>
{
}
/// Insertion helper: given a raw table entry, either hand back the
/// vacant slot (ready to receive the new index) or record the
/// occupying item's index in `duplicates` and return `None`.
fn detect_dup_or_insert<'a, A: Allocator>(
    item: hash_table::Entry<'a, usize, AllocWrapper<A>>,
    duplicates: &mut BTreeSet<usize>,
) -> Option<hash_table::VacantEntry<'a, usize, AllocWrapper<A>>> {
    match item {
        hash_table::Entry::Occupied(occupied) => {
            // Key collision: remember which item we collided with.
            duplicates.insert(*occupied.get());
            None
        }
        hash_table::Entry::Vacant(vacant) => Some(vacant),
    }
}
impl<T: BiHashItem, S: Clone + BuildHasher, A: Allocator> Extend<T>
    for BiHashMap<T, S, A>
{
    /// Inserts every item from `iter`, overwriting existing items that
    /// share a key.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        let (lower, _) = iter.size_hint();
        // An empty map keeps everything, so reserve the full hint; a
        // non-empty map may overwrite existing items, so reserve half
        // (rounded up).
        let additional =
            if self.is_empty() { lower } else { lower.div_ceil(2) };
        self.reserve(additional);
        iter.for_each(|item| {
            self.insert_overwrite(item);
        });
    }
}
impl<'a, T: BiHashItem, S: Clone + BuildHasher, A: Allocator> IntoIterator
    for &'a BiHashMap<T, S, A>
{
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;
    /// Iterates over shared references to the items.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
impl<'a, T: BiHashItem, S: Clone + BuildHasher, A: Allocator> IntoIterator
    for &'a mut BiHashMap<T, S, A>
{
    type Item = RefMut<'a, T, S>;
    type IntoIter = IterMut<'a, T, S, A>;
    /// Iterates over mutable handles (`RefMut`) to the items.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
impl<T: BiHashItem, S: Clone + BuildHasher, A: Allocator> IntoIterator
    for BiHashMap<T, S, A>
{
    type Item = T;
    type IntoIter = IntoIter<T, A>;
    /// Consumes the map, yielding owned items; the key tables are
    /// dropped since only the item storage is needed.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self.items)
    }
}
impl<T: BiHashItem, S: Clone + BuildHasher + Default, A: Default + Allocator>
    FromIterator<T> for BiHashMap<T, S, A>
{
    /// Builds a map from an iterator; later items overwrite earlier
    /// ones that share either key (via `insert_overwrite`).
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        iter.into_iter().fold(BiHashMap::default(), |mut map, item| {
            map.insert_overwrite(item);
            map
        })
    }
}