1use parking_lot::ReentrantMutexGuard;
2use serde::{Deserializer, Serialize, Serializer};
3use std::borrow::Borrow;
4use std::cell::UnsafeCell;
5use std::collections::{
6 hash_map::IntoIter as MapIntoIter, hash_map::Iter as MapIter, hash_map::IterMut as MapIterMut,
7 HashMap as Map, HashMap,
8};
9use std::fmt::{Debug, Formatter};
10use std::hash::Hash;
11use std::ops::{Deref, DerefMut};
12use std::sync::Arc;
13
/// A hash map offering interior mutability through `&self`: writers serialize
/// on `lock`, while reads (`get`, `iter`, `len`, …) go straight to the dirty
/// storage without synchronization — by design, readers may observe writes
/// in progress.
pub struct SyncHashMap<K: Eq + Hash, V> {
    // Underlying storage; mutated through the `UnsafeCell` only while a
    // writer holds `lock` (or has `&mut self`).
    dirty: UnsafeCell<Map<K, V>>,

    // Exactly one of the two lock features is expected to be enabled;
    // with neither, the `lock` field (and this crate) will not compile.
    #[cfg(feature = "reentrant_lock")]
    lock: parking_lot::ReentrantMutex<()>,

    #[cfg(feature = "std_lock")]
    lock: std::sync::Mutex<()>,
}
24
// NOTE(review): these impls are unconditional — they do not require
// `K: Send`/`V: Send` (resp. `+ Sync`), so e.g. `SyncHashMap<i32, Rc<T>>`
// would be declared `Send`. That looks unsound for non-Send/Sync payloads;
// consider bounding them (`K: Send, V: Send` and `K: Send + Sync,
// V: Send + Sync`) — TODO confirm, as tightening would break existing callers.
unsafe impl<K: Eq + Hash, V> Send for SyncHashMap<K, V> {}

unsafe impl<K: Eq + Hash, V> Sync for SyncHashMap<K, V> {}
30
31impl<K, V> std::ops::Index<&K> for SyncHashMap<K, V>
32where
33 K: Eq + Hash,
34{
35 type Output = V;
36
37 fn index(&self, index: &K) -> &Self::Output {
38 unsafe { &(&*self.dirty.get())[index] }
39 }
40}
41
42impl<K, V> SyncHashMap<K, V>
43where
44 K: Eq + Hash,
45{
46 pub fn new_arc() -> Arc<Self> {
47 Arc::new(Self::new())
48 }
49
50 pub fn new() -> Self {
51 Self {
52 dirty: UnsafeCell::new(Map::new()),
53 lock: Default::default(),
54 }
55 }
56
57 pub fn with_capacity(capacity: usize) -> Self {
58 Self {
59 dirty: UnsafeCell::new(Map::with_capacity(capacity)),
60 lock: Default::default(),
61 }
62 }
63
64 pub fn with_map(map: Map<K, V>) -> Self {
65 Self {
66 dirty: UnsafeCell::new(map),
67 lock: Default::default(),
68 }
69 }
70
71 pub fn insert(&self, k: K, v: V) -> Option<V> {
72 let _lock = self.lock.lock();
73 let m = unsafe { &mut *self.dirty.get() };
74 let r = m.insert(k, v);
75 r
76 }
77
78 pub fn insert_mut(&mut self, k: K, v: V) -> Option<V> {
79 let m = unsafe { &mut *self.dirty.get() };
80 m.insert(k, v)
81 }
82
83 pub fn remove(&self, k: &K) -> Option<V> {
84 let g = self.lock.lock();
85 let m = unsafe { &mut *self.dirty.get() };
86 let r = m.remove(k);
87 drop(g);
88 r
89 }
90
91 pub fn remove_mut(&mut self, k: &K) -> Option<V> {
92 let m = unsafe { &mut *self.dirty.get() };
93 m.remove(k)
94 }
95
96 pub fn len(&self) -> usize {
97 unsafe { (&*self.dirty.get()).len() }
98 }
99
100 pub fn is_empty(&self) -> bool {
101 unsafe { (&*self.dirty.get()).is_empty() }
102 }
103
104 pub fn clear(&self) {
105 let g = self.lock.lock();
106 let m = unsafe { &mut *self.dirty.get() };
107 m.clear();
108 drop(g);
109 }
110
111 pub fn clear_mut(&mut self) {
112 let m = unsafe { &mut *self.dirty.get() };
113 m.clear();
114 }
115
116 pub fn shrink_to_fit(&self) {
117 let g = self.lock.lock();
118 let m = unsafe { &mut *self.dirty.get() };
119 m.shrink_to_fit();
120 drop(g);
121 }
122
123 pub fn shrink_to_fit_mut(&mut self) {
124 let m = unsafe { &mut *self.dirty.get() };
125 m.shrink_to_fit()
126 }
127
128 pub fn from(map: Map<K, V>) -> Self
129 where
130 K: Eq + Hash,
131 {
132 let s = Self::with_map(map);
133 s
134 }
135
136 #[inline]
156 pub fn get<Q: ?Sized>(&self, k: &Q) -> Option<&V>
157 where
158 K: Borrow<Q>,
159 Q: Hash + Eq,
160 {
161 unsafe { (&*self.dirty.get()).get(k) }
162 }
163
164 #[inline]
165 pub fn get_mut<Q: ?Sized>(&self, k: &Q) -> Option<SyncMapRefMut<'_, V>>
166 where
167 K: Borrow<Q>,
168 Q: Hash + Eq,
169 {
170 let m = unsafe { &mut *self.dirty.get() };
171
172 #[cfg(feature = "std_lock")]
173 let _g = match self.lock.lock() {
174 Ok(v) => v,
175 Err(_) => return None,
176 };
177
178 #[cfg(feature = "reentrant_lock")]
179 let _g = self.lock.lock();
180
181 Some(SyncMapRefMut {
182 _g,
183 value: m.get_mut(k)?,
184 })
185 }
186
187 #[inline]
188 pub fn contains_key(&self, x: &K) -> bool
189 where
190 K: PartialEq,
191 {
192 let m = unsafe { &mut *self.dirty.get() };
193 m.contains_key(x)
194 }
195
196 pub fn iter_mut(&self) -> IterMut<'_, K, V> {
197 let m = unsafe { &mut *self.dirty.get() };
198
199 #[cfg(feature = "std_lock")]
200 let _g = self.lock.lock().unwrap();
201
202 #[cfg(feature = "reentrant_lock")]
203 let _g = self.lock.lock();
204
205 return IterMut {
206 _g,
207 inner: m.iter_mut(),
208 };
209 }
210
211 pub fn iter(&self) -> MapIter<'_, K, V> {
212 let m = unsafe { &*self.dirty.get() };
213 return m.iter();
214 }
215
216 pub fn dirty_ref(&self) -> &HashMap<K, V> {
217 unsafe { &*self.dirty.get() }
218 }
219
220 pub fn into_inner(self) -> HashMap<K, V> {
221 self.dirty.into_inner()
222 }
223}
224
/// RAII-style guard returned by `SyncHashMap::get_mut`: dereferences to the
/// value and keeps the map's write lock held until dropped.
pub struct SyncMapRefMut<'a, V> {
    // Lock guard; never read, only held so the lock outlives `value`.
    #[cfg(feature = "reentrant_lock")]
    _g: ReentrantMutexGuard<'a, ()>,

    #[cfg(feature = "std_lock")]
    _g: std::sync::MutexGuard<'a, ()>,

    // Exclusive borrow into the map entry, valid while `_g` is held.
    value: &'a mut V,
}
234
235impl<'a, V> Deref for SyncMapRefMut<'_, V> {
236 type Target = V;
237
238 fn deref(&self) -> &Self::Target {
239 self.value
240 }
241}
242
243impl<'a, V> DerefMut for SyncMapRefMut<'_, V> {
244 fn deref_mut(&mut self) -> &mut Self::Target {
245 self.value
246 }
247}
248
249impl<'a, V> Debug for SyncMapRefMut<'_, V>
250where
251 V: Debug,
252{
253 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
254 self.value.fmt(f)
255 }
256}
257
258impl<'a, V> PartialEq<Self> for SyncMapRefMut<'_, V>
259where
260 V: Eq,
261{
262 fn eq(&self, other: &Self) -> bool {
263 self.value.eq(&other.value)
264 }
265}
266
267impl<'a, V> Eq for SyncMapRefMut<'_, V> where V: Eq {}
268
/// Read-only iterator adapter over map entries; a thin wrapper around the
/// std `hash_map::Iter`.
pub struct IterMy<'a, K, V> {
    inner: MapIter<'a, K, V>,
}

impl<'a, K, V> Deref for IterMy<'a, K, V> {
    type Target = MapIter<'a, K, V>;

    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}

impl<'a, K, V> Iterator for IterMy<'a, K, V> {
    type Item = (&'a K, &'a V);

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    /// Delegate so `collect`/`count` can pre-size; the trait default of
    /// `(0, None)` would discard the inner iterator's exact length.
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
287
288
/// Mutable iterator returned by `SyncHashMap::iter_mut`: wraps the std
/// `hash_map::IterMut` and keeps the map's write lock held until dropped.
pub struct IterMut<'a, K, V> {
    // Lock guard; never read, only held for its drop timing.
    #[cfg(feature = "reentrant_lock")]
    _g: ReentrantMutexGuard<'a, ()>,

    #[cfg(feature = "std_lock")]
    _g: std::sync::MutexGuard<'a, ()>,

    // Underlying mutable iterator, valid while `_g` is held.
    inner: MapIterMut<'a, K, V>,
}
298
299impl<'a, K, V> Deref for IterMut<'a, K, V> {
300 type Target = MapIterMut<'a, K, V>;
301
302 fn deref(&self) -> &Self::Target {
303 &self.inner
304 }
305}
306
307impl<'a, K, V> DerefMut for IterMut<'a, K, V> {
308 fn deref_mut(&mut self) -> &mut Self::Target {
309 &mut self.inner
310 }
311}
312
313impl<'a, K, V> Iterator for IterMut<'a, K, V> {
314 type Item = (&'a K, &'a mut V);
315
316 fn next(&mut self) -> Option<Self::Item> {
317 self.inner.next()
318 }
319}
320
321impl<'a, K, V> IntoIterator for &'a SyncHashMap<K, V>
322where
323 K: Eq + Hash,
324{
325 type Item = (&'a K, &'a V);
326 type IntoIter = MapIter<'a, K, V>;
327
328 fn into_iter(self) -> Self::IntoIter {
329 unsafe { (&*self.dirty.get()).iter() }
330 }
331}
332
333impl<K, V> IntoIterator for SyncHashMap<K, V>
334where
335 K: Eq + Hash,
336{
337 type Item = (K, V);
338 type IntoIter = MapIntoIter<K, V>;
339
340 fn into_iter(self) -> Self::IntoIter {
341 self.dirty.into_inner().into_iter()
342 }
343}
344
345impl<K: Eq + Hash, V> From<Map<K, V>> for SyncHashMap<K, V> {
346 fn from(arg: Map<K, V>) -> Self {
347 Self::from(arg)
348 }
349}
350
351impl<K, V> serde::Serialize for SyncHashMap<K, V>
352where
353 K: Eq + Hash + Serialize,
354 V: Serialize,
355{
356 fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
357 where
358 S: Serializer,
359 {
360 self.dirty_ref().serialize(serializer)
361 }
362}
363
364impl<'de, K, V> serde::Deserialize<'de> for SyncHashMap<K, V>
365where
366 K: Eq + Hash + serde::Deserialize<'de>,
367 V: serde::Deserialize<'de>,
368{
369 fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
370 where
371 D: Deserializer<'de>,
372 {
373 let m = Map::deserialize(deserializer)?;
374 Ok(Self::from(m))
375 }
376}
377
378impl<K, V> Debug for SyncHashMap<K, V>
379where
380 K: Eq + Hash + Debug,
381 V: Debug,
382{
383 fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
384 self.dirty_ref().fmt(f)
385 }
386}
387
388impl<K: Clone + Eq + Hash, V: Clone> Clone for SyncHashMap<K, V> {
389 fn clone(&self) -> Self {
390 let c = (*self.dirty_ref()).clone();
391 SyncHashMap::from(c)
392 }
393}
394
395pub mod buckets {
397 use super::{SyncHashMap, SyncMapRefMut};
398 use std::hash::{Hash, Hasher};
399
400 #[derive(Debug, Clone)]
401 pub struct SyncHashMapB<K: Eq + Hash, V> {
402 inner: Vec<SyncHashMap<K, V>>,
403 len: usize,
404 }
405 impl<K: Eq + Hash, V> SyncHashMapB<K, V> {
435 pub fn new(bucket_count: Option<usize>) -> Self {
436 let count = bucket_count.unwrap_or_else(|| 10);
437 let mut arr = vec![];
438 for _ in 0..count {
439 arr.push(SyncHashMap::new());
440 }
441 Self {
442 inner: arr,
443 len: count,
444 }
445 }
446
447 fn key_conv_to_index(&self, k: &K) -> usize {
448 let mut hasher = std::collections::hash_map::DefaultHasher::new();
449 k.hash(&mut hasher);
450 let hash = hasher.finish();
451
452 let index = (hash % self.len as u64) as usize;
456 index
458 }
459
460 pub fn insert(&self, k: K, v: V) -> Option<V> {
471 let index = self.key_conv_to_index(&k);
472 self.inner[index].insert(k, v)
473 }
474
475 pub fn insert_mut(&mut self, k: K, v: V) -> Option<V> {
476 let index = self.key_conv_to_index(&k);
477 self.inner[index].insert_mut(k, v)
478 }
479
480 pub fn remove(&self, k: &K) -> Option<V> {
481 let index = self.key_conv_to_index(&k);
482 self.inner[index].remove(k)
483 }
484
485 pub fn is_empty(&self) -> bool {
486 for ele in &self.inner {
487 if !ele.is_empty() {
488 return false;
489 }
490 }
491 true
492 }
493
494 pub fn len(&self) -> usize {
495 let mut len = 0;
496 for ele in &self.inner {
497 len += ele.len();
498 }
499 len
500 }
501
502 pub fn clear(&self) {
503 for ele in &self.inner {
504 ele.clear();
505 }
506 }
507
508 #[inline]
528 pub fn get(&self, k: &K) -> Option<&V> {
529 let index = self.key_conv_to_index(k);
530 self.inner[index].get(k)
531 }
532
533 pub fn get_mut(&self, k: &K) -> Option<SyncMapRefMut<'_, V>> {
534 let index = self.key_conv_to_index(k);
535 self.inner[index].get_mut(k)
536 }
537 }
538
539 }