use indexmap::IndexMap;
use parking_lot::{ReentrantMutex, ReentrantMutexGuard};
use serde::{Deserializer, Serialize, Serializer};
use std::cell::UnsafeCell;
use std::fmt::{Debug, Display, Formatter};
use std::ops::{Deref, DerefMut, Index};
use std::slice::{Iter as SliceIter, IterMut as SliceIterMut};
use std::sync::Arc;
use std::vec::IntoIter;

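/// A `Vec` wrapper that can be appended to and mutated through a shared
/// reference. Structural writes are serialized by a global `ReentrantMutex`,
/// and `get_mut` additionally takes a per-index lock so distinct slots can be
/// mutated independently. Reads go straight to the underlying buffer without
/// locking, so readers may observe a vector that is mid-update.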
pub struct SyncVec<V> {
    /// The underlying storage; "dirty" because readers access it unguarded.
    dirty: UnsafeCell<Vec<V>>,
    /// Global lock serializing all structural mutation.
    lock: ReentrantMutex<()>,
    /// Per-index locks handed out by `get_mut`.
    locks: UnsafeCell<IndexMap<usize, ReentrantMutex<()>>>,
}

// SAFETY: moving a `SyncVec<V>` across threads moves the `V`s with it, so
// `Send` requires `V: Send`; sharing `&SyncVec<V>` allows mutating `V` from
// other threads, so `Sync` requires `V: Send + Sync`.
unsafe impl<V: Send> Send for SyncVec<V> {}

unsafe impl<V: Send + Sync> Sync for SyncVec<V> {}
impl<V> SyncVec<V> {
    pub fn new_arc() -> Arc<Self> {
        Arc::new(Self::new())
    }

    pub fn new() -> Self {
        Self {
            dirty: UnsafeCell::new(Vec::new()),
            lock: Default::default(),
            locks: UnsafeCell::new(IndexMap::default()),
        }
    }

    pub fn with_capacity(capacity: usize) -> Self {
        Self {
            dirty: UnsafeCell::new(Vec::with_capacity(capacity)),
            lock: Default::default(),
            locks: UnsafeCell::new(IndexMap::with_capacity(capacity)),
        }
    }

    pub fn with_vec(vec: Vec<V>) -> Self {
        Self {
            lock: Default::default(),
            locks: UnsafeCell::new(IndexMap::with_capacity(vec.capacity())),
            dirty: UnsafeCell::new(vec),
        }
    }

    /// Inserts `v` at `index`, shifting later elements; always returns `None`.
    pub fn insert(&self, index: usize, v: V) -> Option<V> {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        m.insert(index, v);
        drop(g);
        None
    }

    /// Overwrites the element at `index`; always returns `None`.
    /// Panics if `index` is out of bounds.
    pub fn set(&self, index: usize, v: V) -> Option<V> {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        m[index] = v;
        drop(g);
        None
    }

    /// Appends an element; always returns `None`.
    pub fn push(&self, v: V) -> Option<V> {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        m.push(v);
        drop(g);
        None
    }

    /// Appends every element of `arr`; always returns `None`.
    pub fn pushes(&self, arr: Vec<V>) -> Option<V> {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        for v in arr {
            m.push(v);
        }
        drop(g);
        None
    }

    /// Appends without locking; `&mut self` already guarantees exclusive access.
    pub fn push_mut(&mut self, v: V) -> Option<V> {
        self.dirty.get_mut().push(v);
        None
    }

    pub fn pop(&self) -> Option<V> {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        let r = m.pop();
        drop(g);
        r
    }

    pub fn pop_mut(&mut self) -> Option<V> {
        self.dirty.get_mut().pop()
    }

    pub fn remove(&self, index: usize) -> Option<V> {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        if m.len() > index {
            let v = m.remove(index);
            drop(g);
            Some(v)
        } else {
            None
        }
    }

    pub fn remove_mut(&mut self, index: usize) -> Option<V> {
        let m = self.dirty.get_mut();
        if m.len() > index {
            Some(m.remove(index))
        } else {
            None
        }
    }

    pub fn len(&self) -> usize {
        unsafe { (&*self.dirty.get()).len() }
    }

    pub fn is_empty(&self) -> bool {
        unsafe { (&*self.dirty.get()).is_empty() }
    }

    pub fn clear(&self) {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        m.clear();
        drop(g);
    }

    pub fn shrink_to_fit(&self) {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        m.shrink_to_fit();
        drop(g);
    }

    pub fn from(vec: Vec<V>) -> Self {
        Self::with_vec(vec)
    }

    #[inline]
    pub fn get(&self, index: usize) -> Option<&V> {
        unsafe { (&*self.dirty.get()).get(index) }
    }

    /// # Safety
    ///
    /// `index` must be in bounds, as for `slice::get_unchecked`.
    #[inline]
    pub unsafe fn get_unchecked(&self, index: usize) -> &V {
        unsafe { (&*self.dirty.get()).get_unchecked(index) }
    }

    #[inline]
    pub fn get_mut(&self, index: usize) -> Option<VecRefMut<'_, V>> {
        let get_mut_lock = self.lock.lock();
        let locks = unsafe { &mut *self.locks.get() };
        // One reentrant mutex per index serializes mutable access to that slot.
        let g = locks
            .entry(index)
            .or_insert_with(|| ReentrantMutex::new(()));
        let v = VecRefMut {
            k: index,
            m: self,
            _g: g.lock(),
            value: {
                let m = unsafe { &mut *self.dirty.get() };
                Some(m.get_mut(index)?)
            },
        };
        drop(get_mut_lock);
        Some(v)
    }

    #[inline]
    pub fn contains(&self, x: &V) -> bool
    where
        V: PartialEq,
    {
        let m = unsafe { &*self.dirty.get() };
        m.contains(x)
    }

    pub fn iter(&self) -> std::slice::Iter<'_, V> {
        unsafe { (&*self.dirty.get()).iter() }
    }

    pub fn iter_mut(&self) -> VecIterMut<'_, V> {
        let g = self.lock.lock();
        let m = unsafe { &mut *self.dirty.get() };
        VecIterMut {
            _g: g,
            inner: Some(m.iter_mut()),
        }
    }

    pub fn into_iter(self) -> IntoIter<V> {
        self.dirty.into_inner().into_iter()
    }

    /// Borrows the underlying vector directly, without taking any lock.
    pub fn dirty_ref(&self) -> &Vec<V> {
        unsafe { &*self.dirty.get() }
    }

    pub fn into_inner(self) -> Vec<V> {
        self.dirty.into_inner()
    }
}

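/// A mutable reference into one `SyncVec` slot. While the guard is alive it
/// holds that index's per-slot lock; dropping it releases the lock and
/// removes the index's entry from the lock table.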
pub struct VecRefMut<'a, V> {
    k: usize,
    m: &'a SyncVec<V>,
    _g: ReentrantMutexGuard<'a, ()>,
    value: Option<&'a mut V>,
}

impl<'a, V> Drop for VecRefMut<'a, V> {
    fn drop(&mut self) {
        // Release the per-index lock entry created by `get_mut`.
        let m = unsafe { &mut *self.m.locks.get() };
        _ = m.swap_remove(&self.k);
    }
}

impl<V> Deref for VecRefMut<'_, V> {
    type Target = V;

    fn deref(&self) -> &Self::Target {
        self.value.as_ref().unwrap()
    }
}

impl<V> DerefMut for VecRefMut<'_, V> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.value.as_mut().unwrap()
    }
}

impl<V> Debug for VecRefMut<'_, V>
where
    V: Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Format the element itself rather than the wrapping `Option`.
        self.value.as_ref().unwrap().fmt(f)
    }
}

impl<V> Display for VecRefMut<'_, V>
where
    V: Display,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.value.as_ref().unwrap().fmt(f)
    }
}

/// Iterator over raw element pointers; a null pointer terminates iteration.
pub struct Iter<'a, V> {
    inner: Option<SliceIter<'a, *const V>>,
}

impl<'a, V> Iterator for Iter<'a, V> {
    type Item = &'a V;

    fn next(&mut self) -> Option<Self::Item> {
        match self.inner.as_mut().unwrap().next() {
            None => None,
            Some(v) => {
                if v.is_null() {
                    None
                } else {
                    unsafe { Some(&**v) }
                }
            }
        }
    }
}

pub struct VecIterMut<'a, V> {
    _g: ReentrantMutexGuard<'a, ()>,
    inner: Option<SliceIterMut<'a, V>>,
}

impl<'a, V> Deref for VecIterMut<'a, V> {
    type Target = SliceIterMut<'a, V>;

    fn deref(&self) -> &Self::Target {
        self.inner.as_ref().unwrap()
    }
}

impl<'a, V> DerefMut for VecIterMut<'a, V> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.inner.as_mut().unwrap()
    }
}

impl<'a, V> Iterator for VecIterMut<'a, V> {
    type Item = &'a mut V;

    fn next(&mut self) -> Option<Self::Item> {
        self.inner.as_mut().unwrap().next()
    }
}

impl<'a, V> IntoIterator for &'a SyncVec<V> {
    type Item = &'a V;
    type IntoIter = std::slice::Iter<'a, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<V> IntoIterator for SyncVec<V> {
    type Item = V;
    type IntoIter = IntoIter<V>;

    fn into_iter(self) -> Self::IntoIter {
        // Dispatch to the inherent `SyncVec::into_iter`, which consumes the
        // inner `Vec`. Inherent methods shadow this trait method during
        // resolution, but the explicit path makes that unambiguous.
        SyncVec::into_iter(self)
    }
}

impl<V> Serialize for SyncVec<V>
where
    V: Serialize,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        self.dirty_ref().serialize(serializer)
    }
}

impl<'de, V> serde::Deserialize<'de> for SyncVec<V>
where
    V: serde::Deserialize<'de>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let m = Vec::deserialize(deserializer)?;
        Ok(Self::from(m))
    }
}

impl<V> Debug for SyncVec<V>
where
    V: Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.dirty_ref().fmt(f)
    }
}

impl<V> Display for SyncVec<V>
where
    V: Display,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // `Vec` has no `Display` impl, so format the elements directly
        // instead of falling back to `Pointer::fmt` on the reference.
        write!(f, "[")?;
        for (i, v) in self.iter().enumerate() {
            if i > 0 {
                write!(f, ", ")?;
            }
            Display::fmt(v, f)?;
        }
        write!(f, "]")
    }
}

impl<V> Index<usize> for SyncVec<V> {
    type Output = V;

    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("Out of bounds access")
    }
}

impl<V: PartialEq> PartialEq for SyncVec<V> {
    fn eq(&self, other: &Self) -> bool {
        self.dirty_ref().eq(other.dirty_ref())
    }
}

impl<V: Clone> Clone for SyncVec<V> {
    fn clone(&self) -> Self {
        SyncVec::from(self.dirty_ref().to_vec())
    }
}

impl<V> Default for SyncVec<V> {
    fn default() -> Self {
        SyncVec::new()
    }
}

#[macro_export]
macro_rules! sync_vec {
    () => (
        $crate::sync::SyncVec::new()
    );
    ($elem:expr; $n:expr) => (
        $crate::sync::SyncVec::with_vec(vec![$elem; $n])
    );
    ($($x:expr),+ $(,)?) => (
        $crate::sync::SyncVec::with_vec(vec![$($x),+])
    );
}
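
// A minimal usage sketch, written as tests. It exercises only the API defined
// above; the module path assumed by `sync_vec!` (`$crate::sync`) is not
// needed here because the tests call `SyncVec` directly.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn push_get_and_len() {
        let v = SyncVec::new();
        v.push(1);
        v.push(2);
        assert_eq!(v.len(), 2);
        assert_eq!(v.get(0), Some(&1));
        assert_eq!(v[1], 2);
    }

    #[test]
    fn get_mut_guards_a_single_slot() {
        let v = SyncVec::with_vec(vec![10, 20]);
        {
            // The guard holds index 0's per-slot lock until it is dropped.
            let mut slot = v.get_mut(0).unwrap();
            *slot += 1;
        }
        assert_eq!(v.get(0), Some(&11));
    }

    #[test]
    fn iteration_and_into_inner() {
        let v = SyncVec::from(vec![1, 2, 3]);
        let sum: i32 = v.iter().sum();
        assert_eq!(sum, 6);
        assert_eq!(v.into_inner(), vec![1, 2, 3]);
    }
}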