use std::collections::hash_map::RandomState;
use std::fmt::{self, Debug};
use std::hash::{BuildHasher, Hash};
use std::iter::FusedIterator;
use std::ops::{Deref, RangeInclusive};
use std::panic::UnwindSafe;
use std::ptr;
use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::{Acquire, Relaxed};
use sdd::{AtomicShared, Guard, Shared};
use super::Equivalent;
use super::hash_table::bucket::{Bucket, EntryPtr, INDEX};
use super::hash_table::bucket_array::BucketArray;
use super::hash_table::{HashTable, LockedEntry};
use crate::async_helper::SendableGuard;
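/// [`HashIndex`] is a concurrent hash map optimized for read-heavy workloads.
///
/// Readers never take a lock: they traverse the table through an epoch-based
/// [`Guard`], while writers lock individual buckets. Removed entries are only
/// marked as removed, and their memory is reclaimed once no reader can
/// observe them.
///
/// A minimal usage sketch (the enclosing crate is assumed to be published as
/// `scc`):
///
/// ```
/// use scc::HashIndex;
///
/// let hashindex: HashIndex<u64, u32> = HashIndex::default();
///
/// assert!(hashindex.insert_sync(1, 10).is_ok());
/// assert_eq!(hashindex.peek_with(&1, |_, v| *v), Some(10));
/// assert!(hashindex.remove_sync(&1));
/// ```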
pub struct HashIndex<K, V, H = RandomState>
where
H: BuildHasher,
{
bucket_array: AtomicShared<BucketArray<K, V, (), INDEX>>,
minimum_capacity: AtomicUsize,
build_hasher: H,
}
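/// A view into a single entry of a [`HashIndex`], which is either occupied or
/// vacant; returned by [`HashIndex::entry_sync`], [`HashIndex::entry_async`],
/// and [`HashIndex::try_entry`].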
pub enum Entry<'h, K, V, H = RandomState>
where
H: BuildHasher,
{
Occupied(OccupiedEntry<'h, K, V, H>),
Vacant(VacantEntry<'h, K, V, H>),
}
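/// A view into an occupied entry of a [`HashIndex`]; the bucket containing
/// the entry stays locked while this view is held.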
pub struct OccupiedEntry<'h, K, V, H = RandomState>
where
H: BuildHasher,
{
hashindex: &'h HashIndex<K, V, H>,
locked_entry: LockedEntry<'h, K, V, (), INDEX>,
}
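/// A view into a vacant entry of a [`HashIndex`]; holds the key, its hash,
/// and the locked bucket into which the entry can be inserted.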
pub struct VacantEntry<'h, K, V, H = RandomState>
where
H: BuildHasher,
{
hashindex: &'h HashIndex<K, V, H>,
key: K,
hash: u64,
locked_entry: LockedEntry<'h, K, V, (), INDEX>,
}
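/// Additional capacity reserved in a [`HashIndex`]; the reservation is
/// released when this handle is dropped.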
pub struct Reserve<'h, K, V, H = RandomState>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
hashindex: &'h HashIndex<K, V, H>,
additional: usize,
}
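/// An iterator over the entries of a [`HashIndex`]; yielded references remain
/// valid for the lifetime of the associated [`Guard`].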
pub struct Iter<'h, 'g, K, V, H = RandomState>
where
H: BuildHasher,
{
hashindex: &'h HashIndex<K, V, H>,
bucket_array: Option<&'g BucketArray<K, V, (), INDEX>>,
index: usize,
bucket: Option<&'g Bucket<K, V, (), INDEX>>,
entry_ptr: EntryPtr<'g, K, V, INDEX>,
guard: &'g Guard,
}
impl<K, V, H> HashIndex<K, V, H>
where
H: BuildHasher,
{
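/// Creates an empty [`HashIndex`] with the given [`BuildHasher`]; no bucket
/// array is allocated until the first insertion.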
#[cfg(not(feature = "loom"))]
#[inline]
pub const fn with_hasher(build_hasher: H) -> Self {
Self {
bucket_array: AtomicShared::null(),
minimum_capacity: AtomicUsize::new(0),
build_hasher,
}
}
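// The `loom` variant is identical except that it cannot be `const`, since
// `loom` atomics are not constructible in a `const` context.
/// Creates an empty [`HashIndex`] with the given [`BuildHasher`]; no bucket
/// array is allocated until the first insertion.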
#[cfg(feature = "loom")]
#[inline]
pub fn with_hasher(build_hasher: H) -> Self {
Self {
bucket_array: AtomicShared::null(),
minimum_capacity: AtomicUsize::new(0),
build_hasher,
}
}
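/// Creates an empty [`HashIndex`] with the given [`BuildHasher`], sized for
/// `capacity` entries; a `capacity` of zero defers allocation until the first
/// insertion.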
#[inline]
pub fn with_capacity_and_hasher(capacity: usize, build_hasher: H) -> Self {
let (array, minimum_capacity) = if capacity == 0 {
(AtomicShared::null(), AtomicUsize::new(0))
} else {
let array = unsafe {
Shared::new_unchecked(BucketArray::<K, V, (), INDEX>::new(
capacity,
AtomicShared::null(),
))
};
let minimum_capacity = array.num_slots();
(
AtomicShared::from(array),
AtomicUsize::new(minimum_capacity),
)
};
Self {
bucket_array: array,
minimum_capacity,
build_hasher,
}
}
}
impl<K, V, H> HashIndex<K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
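/// Temporarily raises the minimum capacity of the [`HashIndex`] by
/// `additional_capacity`, returning `None` if no additional capacity was
/// reserved; dropping the returned [`Reserve`] releases the reservation.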
#[inline]
pub fn reserve(&self, additional_capacity: usize) -> Option<Reserve<'_, K, V, H>> {
let additional = self.reserve_capacity(additional_capacity);
if additional == 0 {
None
} else {
Some(Reserve {
hashindex: self,
additional,
})
}
}
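/// Gets the entry associated with `key` for in-place manipulation,
/// asynchronously waiting for the bucket lock when it is contended.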
#[inline]
pub async fn entry_async(&self, key: K) -> Entry<'_, K, V, H> {
let hash = self.hash(&key);
let sendable_guard = SendableGuard::default();
self.writer_async(hash, &sendable_guard, |writer, data_block, index, len| {
let guard = sendable_guard.guard();
let entry_ptr = writer.get_entry_ptr(data_block, &key, hash, guard);
let locked_entry =
LockedEntry::new(writer, data_block, entry_ptr.clone(), index, len, guard)
.prolong_lifetime(self);
if entry_ptr.is_valid() {
Entry::Occupied(OccupiedEntry {
hashindex: self,
locked_entry,
})
} else {
let vacant_entry = VacantEntry {
hashindex: self,
key,
hash,
locked_entry,
};
Entry::Vacant(vacant_entry)
}
})
.await
}
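/// Gets the entry associated with `key` for in-place manipulation, blocking
/// the current thread until the bucket lock is acquired.
///
/// A usage sketch (crate path `scc` assumed, as above):
///
/// ```
/// use scc::HashIndex;
///
/// let hashindex: HashIndex<u64, u32> = HashIndex::default();
///
/// hashindex.entry_sync(1).or_insert(2);
/// assert_eq!(hashindex.peek_with(&1, |_, v| *v), Some(2));
/// ```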
#[inline]
pub fn entry_sync(&self, key: K) -> Entry<'_, K, V, H> {
let hash = self.hash(&key);
let guard = Guard::new();
self.writer_sync(hash, &guard, |writer, data_block, index, len| {
let entry_ptr = writer.get_entry_ptr(data_block, &key, hash, &guard);
let locked_entry =
LockedEntry::new(writer, data_block, entry_ptr.clone(), index, len, &guard)
.prolong_lifetime(self);
if entry_ptr.is_valid() {
Entry::Occupied(OccupiedEntry {
hashindex: self,
locked_entry,
})
} else {
let vacant_entry = VacantEntry {
hashindex: self,
key,
hash,
locked_entry,
};
Entry::Vacant(vacant_entry)
}
})
}
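/// Tries to get the entry associated with `key` without waiting; returns
/// `None` if the bucket lock could not be acquired immediately.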
#[inline]
pub fn try_entry(&self, key: K) -> Option<Entry<'_, K, V, H>> {
let guard = Guard::new();
let hash = self.hash(&key);
let locked_entry = self.try_reserve_entry(&key, hash, self.prolonged_guard_ref(&guard))?;
if locked_entry.entry_ptr.is_valid() {
Some(Entry::Occupied(OccupiedEntry {
hashindex: self,
locked_entry,
}))
} else {
Some(Entry::Vacant(VacantEntry {
hashindex: self,
key,
hash,
locked_entry,
}))
}
}
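/// Returns an [`OccupiedEntry`] pointing to the first occupied entry, if any;
/// equivalent to [`Self::any_async`] with an always-`true` predicate.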
#[inline]
pub async fn begin_async(&self) -> Option<OccupiedEntry<'_, K, V, H>> {
self.any_async(|_, _| true).await
}
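/// Synchronous counterpart of [`Self::begin_async`].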
#[inline]
pub fn begin_sync(&self) -> Option<OccupiedEntry<'_, K, V, H>> {
self.any_sync(|_, _| true)
}
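/// Returns an [`OccupiedEntry`] for the first entry satisfying `pred`, or
/// `None` if no entry satisfies it.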
#[inline]
pub async fn any_async<P: FnMut(&K, &V) -> bool>(
&self,
mut pred: P,
) -> Option<OccupiedEntry<'_, K, V, H>> {
let mut entry = None;
let sendable_guard = SendableGuard::default();
self.for_each_writer_async(0, 0, &sendable_guard, |writer, data_block, index, len| {
let guard = sendable_guard.guard();
let mut entry_ptr = EntryPtr::new(guard);
while entry_ptr.move_to_next(&writer, guard) {
let (k, v) = entry_ptr.get(data_block);
if pred(k, v) {
let locked_entry =
LockedEntry::new(writer, data_block, entry_ptr, index, len, guard)
.prolong_lifetime(self);
entry = Some(OccupiedEntry {
hashindex: self,
locked_entry,
});
return (true, false);
}
}
(false, false)
})
.await;
entry
}
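/// Synchronous counterpart of [`Self::any_async`].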
#[inline]
pub fn any_sync<P: FnMut(&K, &V) -> bool>(
&self,
mut pred: P,
) -> Option<OccupiedEntry<'_, K, V, H>> {
let mut entry = None;
let guard = Guard::new();
self.for_each_writer_sync(0, 0, &guard, |writer, data_block, index, len| {
let mut entry_ptr = EntryPtr::new(&guard);
while entry_ptr.move_to_next(&writer, &guard) {
let (k, v) = entry_ptr.get(data_block);
if pred(k, v) {
let locked_entry =
LockedEntry::new(writer, data_block, entry_ptr, index, len, &guard)
.prolong_lifetime(self);
entry = Some(OccupiedEntry {
hashindex: self,
locked_entry,
});
return (true, false);
}
}
(false, false)
});
entry
}
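/// Inserts `key` and `val` into the [`HashIndex`], returning them as an error
/// if the key already exists.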
#[inline]
pub async fn insert_async(&self, key: K, val: V) -> Result<(), (K, V)> {
let hash = self.hash(&key);
let sendable_guard = SendableGuard::default();
self.writer_async(hash, &sendable_guard, |writer, data_block, _, _| {
let guard = sendable_guard.guard();
if writer
.get_entry_ptr(data_block, &key, hash, guard)
.is_valid()
{
Err((key, val))
} else {
writer.insert_with(data_block, hash, || (key, val), guard);
Ok(())
}
})
.await
}
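/// Synchronous counterpart of [`Self::insert_async`].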
#[inline]
pub fn insert_sync(&self, key: K, val: V) -> Result<(), (K, V)> {
let hash = self.hash(&key);
let guard = Guard::new();
self.writer_sync(hash, &guard, |writer, data_block, _, _| {
if writer
.get_entry_ptr(data_block, &key, hash, &guard)
.is_valid()
{
Err((key, val))
} else {
writer.insert_with(data_block, hash, || (key, val), &guard);
Ok(())
}
})
}
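/// Removes the entry associated with `key`, returning `true` if the entry
/// was present and removed.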
#[inline]
pub async fn remove_async<Q>(&self, key: &Q) -> bool
where
Q: Equivalent<K> + Hash + ?Sized,
{
self.remove_if_async(key, |_| true).await
}
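/// Synchronous counterpart of [`Self::remove_async`].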
#[inline]
pub fn remove_sync<Q>(&self, key: &Q) -> bool
where
Q: Equivalent<K> + Hash + ?Sized,
{
self.remove_if_sync(key, |_| true)
}
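/// Removes the entry associated with `key` if `condition` returns `true` for
/// its value; returns `true` if the entry was removed.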
#[inline]
pub async fn remove_if_async<Q, F: FnOnce(&V) -> bool>(&self, key: &Q, condition: F) -> bool
where
Q: Equivalent<K> + Hash + ?Sized,
{
let hash = self.hash(key);
let sendable_guard = SendableGuard::default();
self.optional_writer_async(hash, &sendable_guard, |writer, data_block, _, _| {
let mut entry_ptr = writer.get_entry_ptr(data_block, key, hash, sendable_guard.guard());
if entry_ptr.is_valid() && condition(&mut entry_ptr.get_mut(data_block, &writer).1) {
writer.mark_removed(&mut entry_ptr, sendable_guard.guard());
(true, writer.need_rebuild())
} else {
(false, false)
}
})
.await
.ok()
.is_some_and(|removed| removed)
}
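/// Synchronous counterpart of [`Self::remove_if_async`].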
#[inline]
pub fn remove_if_sync<Q, F: FnOnce(&V) -> bool>(&self, key: &Q, condition: F) -> bool
where
Q: Equivalent<K> + Hash + ?Sized,
{
let hash = self.hash(key);
let guard = Guard::default();
self.optional_writer_sync(hash, &guard, |writer, data_block, _, _| {
let mut entry_ptr = writer.get_entry_ptr(data_block, key, hash, &guard);
if entry_ptr.is_valid() && condition(&mut entry_ptr.get_mut(data_block, &writer).1) {
writer.mark_removed(&mut entry_ptr, &guard);
(true, writer.need_rebuild())
} else {
(false, false)
}
})
.ok()
.is_some_and(|removed| removed)
}
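/// Returns an [`OccupiedEntry`] for `key` with the bucket locked, allowing
/// the entry to be updated or removed; unlike [`Self::peek`], this acquires
/// the bucket lock.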
#[inline]
pub async fn get_async<Q>(&self, key: &Q) -> Option<OccupiedEntry<'_, K, V, H>>
where
Q: Equivalent<K> + Hash + ?Sized,
{
let hash = self.hash(key);
let sendable_guard = SendableGuard::default();
self.optional_writer_async(hash, &sendable_guard, |writer, data_block, index, len| {
let guard = sendable_guard.guard();
let entry_ptr = writer.get_entry_ptr(data_block, key, hash, guard);
if entry_ptr.is_valid() {
let locked_entry =
LockedEntry::new(writer, data_block, entry_ptr, index, len, guard)
.prolong_lifetime(self);
return (
Some(OccupiedEntry {
hashindex: self,
locked_entry,
}),
false,
);
}
(None, false)
})
.await
.ok()
.flatten()
}
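/// Synchronous counterpart of [`Self::get_async`].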
#[inline]
pub fn get_sync<Q>(&self, key: &Q) -> Option<OccupiedEntry<'_, K, V, H>>
where
Q: Equivalent<K> + Hash + ?Sized,
{
let hash = self.hash(key);
let guard = Guard::default();
self.optional_writer_sync(hash, &guard, |writer, data_block, index, len| {
let entry_ptr = writer.get_entry_ptr(data_block, key, hash, &guard);
if entry_ptr.is_valid() {
let locked_entry =
LockedEntry::new(writer, data_block, entry_ptr, index, len, &guard)
.prolong_lifetime(self);
return (
Some(OccupiedEntry {
hashindex: self,
locked_entry,
}),
false,
);
}
(None, false)
})
.ok()
.flatten()
}
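/// Returns a reference to the value associated with `key` without acquiring
/// any lock; the reference remains valid as long as `guard` is alive.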
#[inline]
pub fn peek<'g, Q>(&self, key: &Q, guard: &'g Guard) -> Option<&'g V>
where
Q: Equivalent<K> + Hash + ?Sized,
{
self.peek_entry(key, self.hash(key), guard).map(|(_, v)| v)
}
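/// Applies `reader` to the entry associated with `key` under an internally
/// created [`Guard`] and returns the result, or `None` if the key is absent.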
#[inline]
pub fn peek_with<Q, R, F: FnOnce(&K, &V) -> R>(&self, key: &Q, reader: F) -> Option<R>
where
Q: Equivalent<K> + Hash + ?Sized,
{
let guard = Guard::new();
self.peek_entry(key, self.hash(key), &guard)
.map(|(k, v)| reader(k, v))
}
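/// Returns `true` if the [`HashIndex`] contains an entry for `key`.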
#[inline]
pub fn contains<Q>(&self, key: &Q) -> bool
where
Q: Equivalent<K> + Hash + ?Sized,
{
self.peek_with(key, |_, _| ()).is_some()
}
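/// Calls `f` on every entry until `f` returns `false`; the return value is
/// `false` if `f` aborted the iteration.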
#[inline]
pub async fn iter_async<F: FnMut(&K, &V) -> bool>(&self, mut f: F) -> bool {
let mut result = true;
let sendable_guard = SendableGuard::default();
self.for_each_reader_async(&sendable_guard, |reader, data_block| {
let guard = sendable_guard.guard();
let mut entry_ptr = EntryPtr::new(guard);
while entry_ptr.move_to_next(&reader, guard) {
let (k, v) = entry_ptr.get(data_block);
if !f(k, v) {
result = false;
return false;
}
}
true
})
.await;
result
}
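/// Synchronous counterpart of [`Self::iter_async`].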
#[inline]
pub fn iter_sync<F: FnMut(&K, &V) -> bool>(&self, mut f: F) -> bool {
let mut result = true;
let guard = Guard::new();
self.for_each_reader_sync(&guard, |reader, data_block| {
let mut entry_ptr = EntryPtr::new(&guard);
while entry_ptr.move_to_next(&reader, &guard) {
let (k, v) = entry_ptr.get(data_block);
if !f(k, v) {
result = false;
return false;
}
}
true
});
result
}
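/// Retains the entries for which `pred` returns `true`, removing all others.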
#[inline]
pub async fn retain_async<F: FnMut(&K, &V) -> bool>(&self, mut pred: F) {
let sendable_guard = SendableGuard::default();
self.for_each_writer_async(0, 0, &sendable_guard, |writer, data_block, _, _| {
let mut removed = false;
let guard = sendable_guard.guard();
let mut entry_ptr = EntryPtr::new(guard);
while entry_ptr.move_to_next(&writer, guard) {
let (k, v) = entry_ptr.get_mut(data_block, &writer);
if !pred(k, v) {
writer.mark_removed(&mut entry_ptr, guard);
removed = true;
}
}
(false, removed)
})
.await;
}
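/// Synchronous counterpart of [`Self::retain_async`].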
#[inline]
pub fn retain_sync<F: FnMut(&K, &V) -> bool>(&self, mut pred: F) {
let guard = Guard::new();
self.for_each_writer_sync(0, 0, &guard, |writer, data_block, _, _| {
let mut removed = false;
let mut entry_ptr = EntryPtr::new(&guard);
while entry_ptr.move_to_next(&writer, &guard) {
let (k, v) = entry_ptr.get_mut(data_block, &writer);
if !pred(k, v) {
writer.mark_removed(&mut entry_ptr, &guard);
removed = true;
}
}
(false, removed)
});
}
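/// Removes every entry from the [`HashIndex`].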
pub async fn clear_async(&self) {
self.retain_async(|_, _| false).await;
}
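/// Synchronous counterpart of [`Self::clear_async`].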
pub fn clear_sync(&self) {
self.retain_sync(|_, _| false);
}
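/// Returns the number of entries, computed on demand under a fresh
/// [`Guard`].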
#[inline]
pub fn len(&self) -> usize {
self.num_entries(&Guard::new())
}
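/// Returns `true` if the [`HashIndex`] contains no entries.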
#[inline]
pub fn is_empty(&self) -> bool {
!self.has_entry(&Guard::new())
}
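/// Returns the current number of slots in the bucket array.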
#[inline]
pub fn capacity(&self) -> usize {
self.num_slots(&Guard::new())
}
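/// Returns the range from the reserved minimum capacity to the maximum
/// capacity the [`HashIndex`] can grow to.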
#[inline]
pub fn capacity_range(&self) -> RangeInclusive<usize> {
self.minimum_capacity.load(Relaxed)..=self.maximum_capacity()
}
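/// Returns the index of the bucket that `key` belongs to.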
#[inline]
pub fn bucket_index<Q>(&self, key: &Q) -> usize
where
Q: Equivalent<K> + Hash + ?Sized,
{
self.calculate_bucket_index(key)
}
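/// Returns an [`Iter`] over the entries; yielded references remain valid for
/// the lifetime of `guard`, and entries inserted or removed during the
/// iteration may or may not be observed.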
#[inline]
pub fn iter<'h, 'g>(&'h self, guard: &'g Guard) -> Iter<'h, 'g, K, V, H> {
Iter {
hashindex: self,
bucket_array: None,
index: 0,
bucket: None,
entry_ptr: EntryPtr::new(guard),
guard,
}
}
}
impl<K, V, H> Clone for HashIndex<K, V, H>
where
K: 'static + Clone + Eq + Hash,
V: 'static + Clone,
H: BuildHasher + Clone,
{
#[inline]
fn clone(&self) -> Self {
let self_clone = Self::with_capacity_and_hasher(self.capacity(), self.hasher().clone());
for (k, v) in self.iter(&Guard::new()) {
let _result = self_clone.insert_sync(k.clone(), v.clone());
}
self_clone
}
}
impl<K, V, H> Debug for HashIndex<K, V, H>
where
K: 'static + Debug + Eq + Hash,
V: 'static + Debug,
H: BuildHasher,
{
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let guard = Guard::new();
f.debug_map().entries(self.iter(&guard)).finish()
}
}
impl<K, V> HashIndex<K, V, RandomState>
where
K: 'static + Eq + Hash,
V: 'static,
{
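/// Creates an empty [`HashIndex`] with [`RandomState`].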
#[inline]
#[must_use]
pub fn new() -> Self {
Self::default()
}
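/// Creates an empty [`HashIndex`] with the given `capacity` and
/// [`RandomState`].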
#[inline]
#[must_use]
pub fn with_capacity(capacity: usize) -> Self {
Self::with_capacity_and_hasher(capacity, RandomState::new())
}
}
impl<K, V, H> Default for HashIndex<K, V, H>
where
K: 'static,
V: 'static,
H: BuildHasher + Default,
{
#[inline]
fn default() -> Self {
Self::with_hasher(H::default())
}
}
impl<K, V, H> FromIterator<(K, V)> for HashIndex<K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher + Default,
{
#[inline]
fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
let into_iter = iter.into_iter();
let hashindex = Self::with_capacity_and_hasher(
Self::capacity_from_size_hint(into_iter.size_hint()),
H::default(),
);
into_iter.for_each(|e| {
let _result = hashindex.insert_sync(e.0, e.1);
});
hashindex
}
}
impl<K, V, H> HashTable<K, V, H, (), INDEX> for HashIndex<K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
#[inline]
fn hasher(&self) -> &H {
&self.build_hasher
}
#[inline]
fn bucket_array(&self) -> &AtomicShared<BucketArray<K, V, (), INDEX>> {
&self.bucket_array
}
#[inline]
fn minimum_capacity(&self) -> &AtomicUsize {
&self.minimum_capacity
}
#[inline]
fn maximum_capacity(&self) -> usize {
1_usize << (usize::BITS - 1)
}
}
impl<K, V, H> PartialEq for HashIndex<K, V, H>
where
K: 'static + Eq + Hash,
V: 'static + PartialEq,
H: BuildHasher,
{
#[inline]
fn eq(&self, other: &Self) -> bool {
let guard = Guard::new();
self.iter(&guard)
.all(|(k, v)| other.peek_with(k, |_, ov| v == ov) == Some(true))
&& other
.iter(&guard)
.all(|(k, v)| self.peek_with(k, |_, sv| v == sv) == Some(true))
}
}
impl<'h, K, V, H> Entry<'h, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
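/// Ensures the entry is occupied by inserting `val` if it is vacant, and
/// returns an [`OccupiedEntry`].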
#[inline]
pub fn or_insert(self, val: V) -> OccupiedEntry<'h, K, V, H> {
self.or_insert_with(|| val)
}
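/// Ensures the entry is occupied by inserting the value produced by
/// `constructor` if it is vacant, and returns an [`OccupiedEntry`].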
#[inline]
pub fn or_insert_with<F: FnOnce() -> V>(self, constructor: F) -> OccupiedEntry<'h, K, V, H> {
self.or_insert_with_key(|_| constructor())
}
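/// Like [`Self::or_insert_with`], except that `constructor` receives a
/// reference to the key.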
#[inline]
pub fn or_insert_with_key<F: FnOnce(&K) -> V>(
self,
constructor: F,
) -> OccupiedEntry<'h, K, V, H> {
match self {
Self::Occupied(o) => o,
Self::Vacant(v) => {
let val = constructor(v.key());
v.insert_entry(val)
}
}
}
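/// Returns a reference to the key of this entry.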
#[inline]
pub fn key(&self) -> &K {
match self {
Self::Occupied(o) => o.key(),
Self::Vacant(v) => v.key(),
}
}
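/// Applies `f` to the value if the entry is occupied, then returns the entry.
///
/// # Safety
///
/// Readers of the [`HashIndex`] may access the value without any
/// synchronization, so the caller must guarantee that mutating the value in
/// place is safe in the presence of such concurrent readers.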
#[inline]
#[must_use]
pub unsafe fn and_modify<F>(self, f: F) -> Self
where
F: FnOnce(&mut V),
{
unsafe {
match self {
Self::Occupied(mut o) => {
f(o.get_mut());
Self::Occupied(o)
}
Self::Vacant(_) => self,
}
}
}
}
impl<'h, K, V, H> Entry<'h, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static + Default,
H: BuildHasher,
{
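/// Ensures the entry is occupied by inserting `V::default()` if it is vacant,
/// and returns an [`OccupiedEntry`].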
#[inline]
pub fn or_default(self) -> OccupiedEntry<'h, K, V, H> {
match self {
Self::Occupied(o) => o,
Self::Vacant(v) => v.insert_entry(Default::default()),
}
}
}
impl<K, V, H> Debug for Entry<'_, K, V, H>
where
K: 'static + Debug + Eq + Hash,
V: 'static + Debug,
H: BuildHasher,
{
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Vacant(v) => f.debug_tuple("Entry").field(v).finish(),
Self::Occupied(o) => f.debug_tuple("Entry").field(o).finish(),
}
}
}
impl<'h, K, V, H> OccupiedEntry<'h, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
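/// Returns a reference to the key of the entry.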
#[inline]
#[must_use]
pub fn key(&self) -> &K {
&self
.locked_entry
.entry_ptr
.get(self.locked_entry.data_block)
.0
}
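/// Marks the entry removed; the memory backing it is reclaimed once no reader
/// can observe it.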
#[inline]
pub fn remove_entry(mut self) {
let guard = Guard::new();
self.locked_entry.writer.mark_removed(
&mut self.locked_entry.entry_ptr,
self.hashindex.prolonged_guard_ref(&guard),
);
let hashindex = self.hashindex;
let index = self.locked_entry.index;
drop(self);
hashindex.entry_removed(index, &guard);
}
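/// Returns a reference to the value of the entry.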
#[inline]
#[must_use]
pub fn get(&self) -> &V {
&self
.locked_entry
.entry_ptr
.get(self.locked_entry.data_block)
.1
}
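/// Returns a mutable reference to the value of the entry.
///
/// # Safety
///
/// Readers of the [`HashIndex`] may access the value without any
/// synchronization, so the caller must guarantee that mutating the value in
/// place is safe in the presence of such concurrent readers.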
#[inline]
pub unsafe fn get_mut(&mut self) -> &mut V {
&mut self
.locked_entry
.entry_ptr
.get_mut(self.locked_entry.data_block, &self.locked_entry.writer)
.1
}
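/// Marks the entry removed and moves to the next occupied entry,
/// asynchronously waiting for bucket locks.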
#[inline]
pub async fn remove_and_async(mut self) -> Option<OccupiedEntry<'h, K, V, H>> {
let guard = Guard::new();
self.locked_entry.writer.mark_removed(
&mut self.locked_entry.entry_ptr,
self.hashindex.prolonged_guard_ref(&guard),
);
let hashindex = self.hashindex;
if let Some(locked_entry) = self.locked_entry.next_async(hashindex).await {
return Some(OccupiedEntry {
hashindex,
locked_entry,
});
}
None
}
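/// Synchronous counterpart of [`Self::remove_and_async`].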
#[inline]
#[must_use]
pub fn remove_and_sync(mut self) -> Option<Self> {
let guard = Guard::new();
self.locked_entry.writer.mark_removed(
&mut self.locked_entry.entry_ptr,
self.hashindex.prolonged_guard_ref(&guard),
);
let hashindex = self.hashindex;
if let Some(locked_entry) = self.locked_entry.next_sync(hashindex) {
return Some(OccupiedEntry {
hashindex,
locked_entry,
});
}
None
}
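/// Moves to the next occupied entry, consuming `self`; returns `None` once
/// the end of the [`HashIndex`] is reached.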
#[inline]
pub async fn next_async(self) -> Option<OccupiedEntry<'h, K, V, H>> {
let hashindex = self.hashindex;
if let Some(locked_entry) = self.locked_entry.next_async(hashindex).await {
return Some(OccupiedEntry {
hashindex,
locked_entry,
});
}
None
}
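/// Synchronous counterpart of [`Self::next_async`].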
#[inline]
#[must_use]
pub fn next_sync(self) -> Option<Self> {
let hashindex = self.hashindex;
if let Some(locked_entry) = self.locked_entry.next_sync(hashindex) {
return Some(OccupiedEntry {
hashindex,
locked_entry,
});
}
None
}
}
impl<K, V, H> OccupiedEntry<'_, K, V, H>
where
K: 'static + Clone + Eq + Hash,
V: 'static,
H: BuildHasher,
{
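/// Replaces the value by inserting a new entry with a clone of the key and
/// marking the current entry removed, so that readers still referencing the
/// old value remain unaffected.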
#[inline]
pub fn update(mut self, val: V) {
let key = self.key().clone();
let partial_hash = self
.locked_entry
.entry_ptr
.partial_hash(&self.locked_entry.writer);
let guard = Guard::new();
self.locked_entry.writer.insert_with(
self.locked_entry.data_block,
u64::from(partial_hash),
|| (key, val),
self.hashindex.prolonged_guard_ref(&guard),
);
self.locked_entry.writer.mark_removed(
&mut self.locked_entry.entry_ptr,
self.hashindex.prolonged_guard_ref(&guard),
);
}
}
impl<K, V, H> Debug for OccupiedEntry<'_, K, V, H>
where
K: 'static + Debug + Eq + Hash,
V: 'static + Debug,
H: BuildHasher,
{
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("OccupiedEntry")
.field("key", self.key())
.field("value", self.get())
.finish_non_exhaustive()
}
}
impl<K, V, H> Deref for OccupiedEntry<'_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
type Target = V;
#[inline]
fn deref(&self) -> &Self::Target {
self.get()
}
}
impl<'h, K, V, H> VacantEntry<'h, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
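/// Returns a reference to the key that would be inserted.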
#[inline]
pub fn key(&self) -> &K {
&self.key
}
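/// Consumes the [`VacantEntry`], returning the owned key.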
#[inline]
pub fn into_key(self) -> K {
self.key
}
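/// Inserts `val` with the key held by this [`VacantEntry`], returning an
/// [`OccupiedEntry`] for the new entry.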
#[inline]
pub fn insert_entry(self, val: V) -> OccupiedEntry<'h, K, V, H> {
let guard = Guard::new();
let entry_ptr = self.locked_entry.writer.insert_with(
self.locked_entry.data_block,
self.hash,
|| (self.key, val),
self.hashindex.prolonged_guard_ref(&guard),
);
OccupiedEntry {
hashindex: self.hashindex,
locked_entry: LockedEntry {
index: self.locked_entry.index,
data_block: self.locked_entry.data_block,
writer: self.locked_entry.writer,
entry_ptr,
len: 0,
},
}
}
}
impl<K, V, H> Debug for VacantEntry<'_, K, V, H>
where
K: 'static + Debug + Eq + Hash,
V: 'static + Debug,
H: BuildHasher,
{
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("VacantEntry").field(self.key()).finish()
}
}
impl<K, V, H> Reserve<'_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
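/// Returns the amount of capacity reserved by this [`Reserve`].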
#[inline]
#[must_use]
pub fn additional_capacity(&self) -> usize {
self.additional
}
}
impl<K, V, H> AsRef<HashIndex<K, V, H>> for Reserve<'_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
#[inline]
fn as_ref(&self) -> &HashIndex<K, V, H> {
self.hashindex
}
}
impl<K, V, H> Debug for Reserve<'_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Reserve").field(&self.additional).finish()
}
}
impl<K, V, H> Deref for Reserve<'_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
type Target = HashIndex<K, V, H>;
#[inline]
fn deref(&self) -> &Self::Target {
self.hashindex
}
}
impl<K, V, H> Drop for Reserve<'_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
#[inline]
fn drop(&mut self) {
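// Return the reserved capacity, then shrink or rebuild the table if the
// reduced minimum capacity warrants it.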
let result = self
.hashindex
.minimum_capacity
.fetch_sub(self.additional, Relaxed);
debug_assert!(result >= self.additional);
let guard = Guard::new();
if let Some(current_array) = self.hashindex.bucket_array.load(Acquire, &guard).as_ref() {
self.try_shrink_or_rebuild(current_array, 0, &guard);
}
}
}
impl<K, V, H> Debug for Iter<'_, '_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("Iter")
.field("current_index", &self.index)
.field("current_entry_ptr", &self.entry_ptr)
.finish()
}
}
impl<'g, K, V, H> Iterator for Iter<'_, 'g, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
type Item = (&'g K, &'g V);
#[inline]
fn next(&mut self) -> Option<Self::Item> {
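// Pin the bucket array to iterate: if a resize is in progress, start from
// the old array so that entries are visited before they migrate to the
// current array.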
let mut array = if let Some(&array) = self.bucket_array.as_ref() {
array
} else {
let current_array = self
.hashindex
.bucket_array()
.load(Acquire, self.guard)
.as_ref()?;
let old_array_ptr = current_array.old_array(self.guard);
let array = if let Some(old_array) = old_array_ptr.as_ref() {
old_array
} else {
current_array
};
self.bucket_array.replace(array);
self.bucket.replace(array.bucket(0));
array
};
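// Scan the buckets one by one; once the pinned array is exhausted, move on
// from the old array to the current array, and stop after the current
// array has been fully traversed.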
loop {
if let Some(bucket) = self.bucket.take() {
if bucket.len() != 0 && self.entry_ptr.move_to_next(bucket, self.guard) {
let (k, v) = self.entry_ptr.get(array.data_block(self.index));
self.bucket.replace(bucket);
return Some((k, v));
}
}
self.entry_ptr = EntryPtr::new(self.guard);
if self.index + 1 == array.len() {
self.index = 0;
let current_array = self
.hashindex
.bucket_array()
.load(Acquire, self.guard)
.as_ref()?;
if self
.bucket_array
.as_ref()
.is_some_and(|&a| ptr::eq(a, current_array))
{
break;
}
array = if let Some(old_array) = current_array.old_array(self.guard).as_ref() {
if self
.bucket_array
.as_ref()
.is_some_and(|&a| ptr::eq(a, old_array))
{
array = current_array;
self.bucket_array.replace(current_array);
self.bucket.replace(current_array.bucket(0));
continue;
}
old_array
} else {
current_array
};
self.bucket_array.replace(array);
self.bucket.replace(array.bucket(0));
} else {
self.index += 1;
self.bucket.replace(array.bucket(self.index));
}
}
None
}
}
impl<K, V, H> FusedIterator for Iter<'_, '_, K, V, H>
where
K: 'static + Eq + Hash,
V: 'static,
H: BuildHasher,
{
}
impl<K, V, H> UnwindSafe for Iter<'_, '_, K, V, H>
where
K: 'static + Eq + Hash + UnwindSafe,
V: 'static + UnwindSafe,
H: BuildHasher + UnwindSafe,
{
}