#![forbid(
missing_docs,
unsafe_op_in_unsafe_fn,
clippy::missing_safety_doc,
clippy::multiple_unsafe_ops_per_block,
clippy::undocumented_unsafe_blocks
)]
mod lock;
use crossbeam_utils::CachePadded;
use hashbrown::{HashTable, hash_table};
use lock::{RwLock, RwLockReadGuardDetached, RwLockWriteGuardDetached};
use std::ops::Deref;
use std::sync::LazyLock;
/// A concurrent hash table, sharded into multiple `RwLock`-protected
/// `hashbrown::HashTable`s so that operations on different shards do not
/// contend on a single lock.
///
/// The shard for a given hash is selected from the high bits of the hash
/// (see `determine_shard`); each shard is cache-line padded to avoid
/// false sharing between adjacent shard locks.
pub struct DashTable<T> {
    /// Shift applied to `hash << 7` to select a shard; always equals
    /// `usize::BITS - log2(shard count)`.
    shift: u32,
    /// The shards; the slice length is a power of two
    /// (`1 << default_shard_shift()`).
    shards: Box<[CachePadded<RwLock<HashTable<T>>>]>,
}
/// Default log2 of the shard count: four times the detected available
/// parallelism, rounded up to a power of two. Falls back to a parallelism
/// of 1 when detection fails. Computed once and cached for the process.
fn default_shard_shift() -> u32 {
    static SHIFT: LazyLock<u32> = LazyLock::new(|| {
        let threads = std::thread::available_parallelism().map_or(1, usize::from);
        (threads * 4).next_power_of_two().ilog2()
    });
    *SHIFT
}
impl<T> Default for DashTable<T> {
fn default() -> Self {
Self::new()
}
}
impl<T> DashTable<T> {
    /// Creates an empty table with the default number of shards
    /// (`1 << default_shard_shift()`).
    pub fn new() -> Self {
        let shard_shift = default_shard_shift();
        // `determine_shard` shifts by `usize::BITS - shard_shift`, which is
        // only a valid shift amount when `shard_shift > 0`.
        assert!(shard_shift > 0);
        let shard_amount = 1 << shard_shift;
        let shift = usize::BITS - shard_shift;
        let shards = (0..shard_amount)
            .map(|_| CachePadded::new(RwLock::new(HashTable::new())))
            .collect();
        Self { shift, shards }
    }

    /// Creates an empty table that can hold at least `capacity` elements
    /// without reallocating, using the default number of shards.
    pub fn with_capacity(capacity: usize) -> Self {
        let shard_shift = default_shard_shift();
        // Same shift-validity requirement as in `new`.
        assert!(shard_shift > 0);
        let shard_amount = 1 << shard_shift;
        let shift = usize::BITS - shard_shift;
        // Per-shard capacity: `capacity / shard_amount` rounded up, so the
        // total capacity across all shards is at least `capacity`.
        let cps = (capacity + (shard_amount - 1)) >> shard_shift;
        let shards = (0..shard_amount)
            .map(|_| CachePadded::new(RwLock::new(HashTable::with_capacity(cps))))
            .collect();
        Self { shift, shards }
    }

    /// Looks up the slot for `hash`/`eq`, write-locking the shard the hash
    /// maps to. The write lock is held for as long as the returned [`Entry`]
    /// (or anything derived from it) is alive.
    ///
    /// `hasher` is used by `hashbrown` if the shard table needs to grow.
    pub fn entry<'a>(
        &'a self,
        hash: u64,
        eq: impl FnMut(&T) -> bool,
        hasher: impl Fn(&T) -> u64,
    ) -> Entry<'a, T> {
        let shard = self.determine_shard(hash as usize);
        let guard = self.shards[shard].write();
        // SAFETY: the detached guard is moved into the returned `Entry`
        // alongside the borrow of the shard's table, so that borrow can
        // never outlive the write lock protecting it.
        let (_guard, shard) = unsafe { RwLockWriteGuardDetached::detach_from(guard) };
        match shard.entry(hash, eq, hasher) {
            hash_table::Entry::Occupied(entry) => Entry::Occupied(OccupiedEntry { _guard, entry }),
            hash_table::Entry::Vacant(entry) => Entry::Vacant(VacantEntry { _guard, entry }),
        }
    }

    /// Like [`DashTable::entry`], but takes `&mut self` and therefore needs
    /// no locking: exclusive access to the whole table is already guaranteed,
    /// so the shard is reached via `RwLock::get_mut`.
    pub fn entry_mut<'a>(
        &'a mut self,
        hash: u64,
        eq: impl FnMut(&T) -> bool,
        hasher: impl Fn(&T) -> u64,
    ) -> MutEntry<'a, T> {
        let shard = self.determine_shard(hash as usize);
        let table = self.shards[shard].get_mut();
        match table.entry(hash, eq, hasher) {
            hash_table::Entry::Occupied(entry) => MutEntry::Occupied(MutOccupiedEntry(entry)),
            hash_table::Entry::Vacant(entry) => MutEntry::Vacant(MutVacantEntry(entry)),
        }
    }

    /// Returns a read-locked reference to an element matching `hash`/`eq`,
    /// or `None` if there is no match. The shard's read lock is held until
    /// the returned [`Ref`] is dropped.
    pub fn find<'a>(&'a self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<Ref<'a, T>> {
        let shard = self.determine_shard(hash as usize);
        let guard = self.shards[shard].read();
        // SAFETY: the detached guard is stored in the returned `Ref` next to
        // the reference borrowed from the table, so the reference cannot
        // outlive the read lock.
        let (_guard, shard) = unsafe { RwLockReadGuardDetached::detach_from(guard) };
        shard
            .find(hash, eq)
            .map(|reference| Ref { _guard, reference })
    }

    /// Inserts `value` without checking whether an equal element is already
    /// present (the contract of `hashbrown::HashTable::insert_unique`); the
    /// caller must guarantee uniqueness. Write-locks the shard for the
    /// lifetime of the returned entry.
    pub fn insert_unique<'a>(
        &'a self,
        hash: u64,
        value: T,
        hasher: impl Fn(&T) -> u64,
    ) -> OccupiedEntry<'a, T> {
        let shard = self.determine_shard(hash as usize);
        let guard = self.shards[shard].write();
        // SAFETY: as in `entry` — the detached guard travels with the
        // returned `OccupiedEntry`, outliving every borrow taken from the
        // shard's table.
        let (_guard, shard) = unsafe { RwLockWriteGuardDetached::detach_from(guard) };
        let entry = shard.insert_unique(hash, value, hasher);
        OccupiedEntry { _guard, entry }
    }

    /// Exclusive-access variant of [`DashTable::insert_unique`]; no locking
    /// is required because `&mut self` already guarantees sole access.
    pub fn insert_unique_mut<'a>(
        &'a mut self,
        hash: u64,
        value: T,
        hasher: impl Fn(&T) -> u64,
    ) -> MutOccupiedEntry<'a, T> {
        let shard = self.determine_shard(hash as usize);
        let table = self.shards[shard].get_mut();
        let entry = table.insert_unique(hash, value, hasher);
        MutOccupiedEntry(entry)
    }

    /// Maps a hash to a shard index: discards the top 7 bits of the hash
    /// (NOTE(review): presumably because hashbrown derives its control bytes
    /// from those bits — confirm against the `lock`/hashbrown internals) and
    /// then keeps the next `shard_shift` high bits, since
    /// `self.shift == usize::BITS - shard_shift`.
    fn determine_shard(&self, hash: usize) -> usize {
        (hash << 7) >> self.shift
    }
}
/// A shared reference to an element of a [`DashTable`], holding the shard's
/// read lock for as long as it is alive. Dereferences to the element.
pub struct Ref<'a, T> {
    /// Keeps the shard read-locked; dropped (unlocking) when `Ref` is dropped.
    _guard: RwLockReadGuardDetached<'a>,
    /// The element borrowed from the locked shard.
    reference: &'a T,
}
/// Dereferences to the element guarded by the read lock.
impl<T> Deref for Ref<'_, T> {
    type Target = T;

    fn deref(&self) -> &T {
        self.reference
    }
}
/// A view into a single slot of a [`DashTable`], obtained via
/// [`DashTable::entry`]. Both variants hold the shard's write lock.
pub enum Entry<'a, T> {
    /// The slot already contains a matching element.
    Occupied(OccupiedEntry<'a, T>),
    /// The slot is empty; an element can be inserted.
    Vacant(VacantEntry<'a, T>),
}
impl<'a, T> Entry<'a, T> {
pub fn or_insert_with(self, default: impl FnOnce() -> T) -> OccupiedEntry<'a, T> {
match self {
Entry::Occupied(entry) => entry,
Entry::Vacant(entry) => entry.insert(default()),
}
}
}
/// An occupied slot of a [`DashTable`], holding the shard's write lock for
/// as long as it is alive.
pub struct OccupiedEntry<'a, T> {
    /// Keeps the shard write-locked; dropped (unlocking) with the entry.
    _guard: RwLockWriteGuardDetached<'a>,
    /// The underlying hashbrown entry borrowed from the locked shard.
    entry: hash_table::OccupiedEntry<'a, T>,
}
impl<T> OccupiedEntry<'_, T> {
    /// Returns a shared reference to the element.
    pub fn get(&self) -> &T {
        self.entry.get()
    }

    /// Returns a mutable reference to the element.
    ///
    /// This is sound because the entry holds the shard's *write* guard
    /// (`_guard`) for its whole lifetime, so access to the underlying table
    /// is exclusive. Without this method the write lock acquired by
    /// [`DashTable::entry`] could never actually be used to mutate a value.
    pub fn get_mut(&mut self) -> &mut T {
        self.entry.get_mut()
    }
}
/// A vacant slot of a [`DashTable`], holding the shard's write lock so an
/// element can be inserted without racing other writers.
pub struct VacantEntry<'a, T> {
    /// Keeps the shard write-locked; transferred to the `OccupiedEntry` on
    /// insertion, otherwise dropped (unlocking) with this entry.
    _guard: RwLockWriteGuardDetached<'a>,
    /// The underlying hashbrown vacant entry borrowed from the locked shard.
    entry: hash_table::VacantEntry<'a, T>,
}
impl<'a, T> VacantEntry<'a, T> {
pub fn insert(self, value: T) -> OccupiedEntry<'a, T> {
OccupiedEntry {
_guard: self._guard,
entry: self.entry.insert(value),
}
}
}
/// A view into a single slot of a [`DashTable`], obtained via
/// [`DashTable::entry_mut`]. No lock is involved: the table is borrowed
/// exclusively (`&mut`).
pub enum MutEntry<'a, T> {
    /// The slot already contains a matching element.
    Occupied(MutOccupiedEntry<'a, T>),
    /// The slot is empty; an element can be inserted.
    Vacant(MutVacantEntry<'a, T>),
}
impl<'a, T> MutEntry<'a, T> {
pub fn or_insert_with(self, default: impl FnOnce() -> T) -> MutOccupiedEntry<'a, T> {
match self {
MutEntry::Occupied(entry) => entry,
MutEntry::Vacant(entry) => entry.insert(default()),
}
}
}
pub struct MutOccupiedEntry<'a, T>(hash_table::OccupiedEntry<'a, T>);
impl<T> MutOccupiedEntry<'_, T> {
    /// Returns a shared reference to the element.
    pub fn get(&self) -> &T {
        self.0.get()
    }

    /// Returns a mutable reference to the element.
    ///
    /// Sound because a `MutOccupiedEntry` is only obtainable through the
    /// `&mut self` methods of [`DashTable`], which already guarantee
    /// exclusive access to the whole table; mirrors
    /// [`OccupiedEntry`]'s API so both entry flavours allow mutation.
    pub fn get_mut(&mut self) -> &mut T {
        self.0.get_mut()
    }
}
pub struct MutVacantEntry<'a, T>(hash_table::VacantEntry<'a, T>);
impl<'a, T> MutVacantEntry<'a, T> {
    /// Inserts `value` into the vacant slot and returns an occupied view of
    /// the newly stored element.
    pub fn insert(self, value: T) -> MutOccupiedEntry<'a, T> {
        let inner = self.0.insert(value);
        MutOccupiedEntry(inner)
    }
}