1#![forbid(
7 missing_docs,
8 unsafe_op_in_unsafe_fn,
9 clippy::missing_safety_doc,
10 clippy::multiple_unsafe_ops_per_block,
11 clippy::undocumented_unsafe_blocks
12)]
13
14mod lock;
15
16use crossbeam_utils::CachePadded;
17use hashbrown::{HashTable, hash_table};
18use lock::{RwLock, RwLockReadGuardDetached, RwLockWriteGuardDetached};
19use std::ops::Deref;
20use std::sync::LazyLock;
21
/// A concurrent hash table, split into multiple independently locked shards
/// so that operations on different shards do not contend.
pub struct DashTable<T> {
    /// Right-shift applied in `determine_shard` to reduce `hash << 7` to a
    /// shard index; equals `usize::BITS - log2(shards.len())`.
    shift: u32,
    /// One `RwLock`-protected `HashTable` per shard. Each lock is
    /// cache-padded to avoid false sharing between neighboring shards.
    shards: Box<[CachePadded<RwLock<HashTable<T>>>]>,
}
27
/// Returns the default number of shard-index bits: log2 of the shard count,
/// sized at four shards per available hardware thread (rounded up to the
/// next power of two, minimum 4 shards).
fn default_shard_shift() -> u32 {
    // Computed once per process and cached.
    static DEFAULT_SHARD_SHIFT: LazyLock<u32> = LazyLock::new(|| {
        let threads = std::thread::available_parallelism().map_or(1, usize::from);
        (threads * 4).next_power_of_two().ilog2()
    });
    *DEFAULT_SHARD_SHIFT
}
36
37impl<T> Default for DashTable<T> {
38 fn default() -> Self {
39 Self::new()
40 }
41}
42
43impl<T> DashTable<T> {
44 pub fn new() -> Self {
46 let shard_shift = default_shard_shift();
47 assert!(shard_shift > 0);
48 let shard_amount = 1 << shard_shift;
49
50 let shift = usize::BITS - shard_shift;
51
52 let shards = (0..shard_amount)
53 .map(|_| CachePadded::new(RwLock::new(HashTable::new())))
54 .collect();
55
56 Self { shift, shards }
57 }
58
59 pub fn with_capacity(capacity: usize) -> Self {
62 let shard_shift = default_shard_shift();
63 assert!(shard_shift > 0);
64 let shard_amount = 1 << shard_shift;
65
66 let shift = usize::BITS - shard_shift;
67
68 let cps = (capacity + (shard_amount - 1)) >> shard_shift;
69
70 let shards = (0..shard_amount)
71 .map(|_| CachePadded::new(RwLock::new(HashTable::with_capacity(cps))))
72 .collect();
73
74 Self { shift, shards }
75 }
76
77 pub fn entry<'a>(
86 &'a self,
87 hash: u64,
88 eq: impl FnMut(&T) -> bool,
89 hasher: impl Fn(&T) -> u64,
90 ) -> Entry<'a, T> {
91 let shard = self.determine_shard(hash as usize);
92 let guard = self.shards[shard].write();
93 let (_guard, shard) = unsafe { RwLockWriteGuardDetached::detach_from(guard) };
97
98 match shard.entry(hash, eq, hasher) {
99 hash_table::Entry::Occupied(entry) => Entry::Occupied(OccupiedEntry { _guard, entry }),
100 hash_table::Entry::Vacant(entry) => Entry::Vacant(VacantEntry { _guard, entry }),
101 }
102 }
103
104 pub fn entry_mut<'a>(
110 &'a mut self,
111 hash: u64,
112 eq: impl FnMut(&T) -> bool,
113 hasher: impl Fn(&T) -> u64,
114 ) -> MutEntry<'a, T> {
115 let shard = self.determine_shard(hash as usize);
116 let table = self.shards[shard].get_mut();
117 match table.entry(hash, eq, hasher) {
118 hash_table::Entry::Occupied(entry) => MutEntry::Occupied(MutOccupiedEntry(entry)),
119 hash_table::Entry::Vacant(entry) => MutEntry::Vacant(MutVacantEntry(entry)),
120 }
121 }
122
123 pub fn find<'a>(&'a self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<Ref<'a, T>> {
128 let shard = self.determine_shard(hash as usize);
129 let guard = self.shards[shard].read();
130 let (_guard, shard) = unsafe { RwLockReadGuardDetached::detach_from(guard) };
134
135 shard
136 .find(hash, eq)
137 .map(|reference| Ref { _guard, reference })
138 }
139
140 pub fn insert_unique<'a>(
147 &'a self,
148 hash: u64,
149 value: T,
150 hasher: impl Fn(&T) -> u64,
151 ) -> OccupiedEntry<'a, T> {
152 let shard = self.determine_shard(hash as usize);
153 let guard = self.shards[shard].write();
154 let (_guard, shard) = unsafe { RwLockWriteGuardDetached::detach_from(guard) };
158
159 let entry = shard.insert_unique(hash, value, hasher);
160 OccupiedEntry { _guard, entry }
161 }
162
163 pub fn insert_unique_mut<'a>(
170 &'a mut self,
171 hash: u64,
172 value: T,
173 hasher: impl Fn(&T) -> u64,
174 ) -> MutOccupiedEntry<'a, T> {
175 let shard = self.determine_shard(hash as usize);
176 let table = self.shards[shard].get_mut();
177 let entry = table.insert_unique(hash, value, hasher);
178 MutOccupiedEntry(entry)
179 }
180
181 fn determine_shard(&self, hash: usize) -> usize {
182 (hash << 7) >> self.shift
184 }
185}
186
/// A shared reference to an element of a [`DashTable`], holding the owning
/// shard's read lock for as long as it is alive.
pub struct Ref<'a, T> {
    /// Keeps the shard's read lock held; never read, only dropped.
    _guard: RwLockReadGuardDetached<'a>,
    /// The element inside the locked shard.
    reference: &'a T,
}
192
193impl<'a, T> Deref for Ref<'a, T> {
194 type Target = T;
195
196 fn deref(&self) -> &Self::Target {
197 self.reference
198 }
199}
200
/// A view into a single slot of a [`DashTable`], holding the owning shard's
/// write lock. Mirrors `hash_table::Entry`.
pub enum Entry<'a, T> {
    /// The slot already contains a value.
    Occupied(OccupiedEntry<'a, T>),
    /// The slot is empty.
    Vacant(VacantEntry<'a, T>),
}
209
210impl<'a, T> Entry<'a, T> {
211 pub fn or_insert_with(self, default: impl FnOnce() -> T) -> OccupiedEntry<'a, T> {
213 match self {
214 Entry::Occupied(entry) => entry,
215 Entry::Vacant(entry) => entry.insert(default()),
216 }
217 }
218}
219
/// An occupied slot in a [`DashTable`], holding the owning shard's write
/// lock for as long as it is alive.
pub struct OccupiedEntry<'a, T> {
    /// Keeps the shard's write lock held; never read, only dropped.
    _guard: RwLockWriteGuardDetached<'a>,
    /// The underlying hashbrown entry inside the locked shard.
    entry: hash_table::OccupiedEntry<'a, T>,
}
225
226impl<T> OccupiedEntry<'_, T> {
227 pub fn get(&self) -> &T {
229 self.entry.get()
230 }
231}
232
/// A vacant slot in a [`DashTable`], holding the owning shard's write lock
/// for as long as it is alive.
pub struct VacantEntry<'a, T> {
    /// Keeps the shard's write lock held; never read, only dropped.
    _guard: RwLockWriteGuardDetached<'a>,
    /// The underlying hashbrown entry inside the locked shard.
    entry: hash_table::VacantEntry<'a, T>,
}
238
239impl<'a, T> VacantEntry<'a, T> {
240 pub fn insert(self, value: T) -> OccupiedEntry<'a, T> {
242 OccupiedEntry {
243 _guard: self._guard,
244 entry: self.entry.insert(value),
245 }
246 }
247}
248
/// A view into a single slot of a [`DashTable`] accessed through `&mut`,
/// so no lock is held. Mirrors [`Entry`].
pub enum MutEntry<'a, T> {
    /// The slot already contains a value.
    Occupied(MutOccupiedEntry<'a, T>),
    /// The slot is empty.
    Vacant(MutVacantEntry<'a, T>),
}
257
258impl<'a, T> MutEntry<'a, T> {
259 pub fn or_insert_with(self, default: impl FnOnce() -> T) -> MutOccupiedEntry<'a, T> {
261 match self {
262 MutEntry::Occupied(entry) => entry,
263 MutEntry::Vacant(entry) => entry.insert(default()),
264 }
265 }
266}
267
268pub struct MutOccupiedEntry<'a, T>(hash_table::OccupiedEntry<'a, T>);
270
271impl<T> MutOccupiedEntry<'_, T> {
272 pub fn get(&self) -> &T {
274 self.0.get()
275 }
276}
277
278pub struct MutVacantEntry<'a, T>(hash_table::VacantEntry<'a, T>);
280
281impl<'a, T> MutVacantEntry<'a, T> {
282 pub fn insert(self, value: T) -> MutOccupiedEntry<'a, T> {
284 MutOccupiedEntry(self.0.insert(value))
285 }
286}