fusion_blossom/
pointers.rs

//! Pointer Types
//!
//! Since fusion blossom requires no synchronization with mutex, it's inefficient to wrap everything in a mutex.
//! At the same time, I want to enjoy the safety check provided by the Rust compiler, so I want to limit unsafe code to a minimum.
//! The solution is to write everything in safe Rust, and debug it.
//! After this, one can enable the feature `unsafe_pointer` to remove the unnecessary locks, thus improving the performance.
//!

use super::util::*;
use crate::parking_lot::lock_api::{RwLockReadGuard, RwLockWriteGuard};
use crate::parking_lot::{RawRwLock, RwLock};
use std::sync::{Arc, Weak};

14/// allows fast reset of vector of objects without iterating over all objects each time: dynamically clear it
15pub trait FastClear {
16    /// user provided method to actually clear the fields
17    fn hard_clear(&mut self);
18
19    /// get timestamp
20    fn get_timestamp(&self) -> FastClearTimestamp;
21
22    /// set timestamp
23    fn set_timestamp(&mut self, timestamp: FastClearTimestamp);
24
25    /// dynamically clear it if not already cleared; it's safe to call many times
26    #[inline(always)]
27    fn dynamic_clear(&mut self, active_timestamp: FastClearTimestamp) {
28        if self.get_timestamp() != active_timestamp {
29            self.hard_clear();
30            self.set_timestamp(active_timestamp);
31        }
32    }
33
34    /// when debugging your program, you can put this function every time you obtained a lock of a new object
35    #[inline(always)]
36    fn debug_assert_dynamic_cleared(&self, active_timestamp: FastClearTimestamp) {
37        debug_assert!(
38            self.get_timestamp() == active_timestamp,
39            "bug detected: not dynamically cleared, expected timestamp: {}, current timestamp: {}",
40            active_timestamp,
41            self.get_timestamp()
42        );
43    }
44}
45
46pub trait FastClearRwLockPtr<ObjType>
47where
48    ObjType: FastClear,
49{
50    fn new_ptr(ptr: Arc<RwLock<ObjType>>) -> Self;
51
52    fn new_value(obj: ObjType) -> Self;
53
54    fn ptr(&self) -> &Arc<RwLock<ObjType>>;
55
56    fn ptr_mut(&mut self) -> &mut Arc<RwLock<ObjType>>;
57
58    #[inline(always)]
59    fn read_recursive(&self, active_timestamp: FastClearTimestamp) -> RwLockReadGuard<RawRwLock, ObjType> {
60        let ret = self.ptr().read_recursive();
61        ret.debug_assert_dynamic_cleared(active_timestamp); // only assert during debug modes
62        ret
63    }
64
65    /// without sanity check: this data might be outdated, so only use when you're read those immutable fields
66    #[inline(always)]
67    fn read_recursive_force(&self) -> RwLockReadGuard<RawRwLock, ObjType> {
68        let ret = self.ptr().read_recursive();
69        ret
70    }
71
72    #[inline(always)]
73    fn write(&self, active_timestamp: FastClearTimestamp) -> RwLockWriteGuard<RawRwLock, ObjType> {
74        let ret = self.ptr().write();
75        ret.debug_assert_dynamic_cleared(active_timestamp); // only assert during debug modes
76        ret
77    }
78
79    /// without sanity check: useful only in implementing hard_clear
80    #[inline(always)]
81    fn write_force(&self) -> RwLockWriteGuard<RawRwLock, ObjType> {
82        let ret = self.ptr().write();
83        ret
84    }
85
86    /// dynamically clear it if not already cleared; it's safe to call many times, but it will acquire a writer lock
87    #[inline(always)]
88    fn dynamic_clear(&self, active_timestamp: FastClearTimestamp) {
89        let mut value = self.write_force();
90        value.dynamic_clear(active_timestamp);
91    }
92
93    fn ptr_eq(&self, other: &Self) -> bool {
94        Arc::ptr_eq(self.ptr(), other.ptr())
95    }
96}
97
98pub trait RwLockPtr<ObjType> {
99    fn new_ptr(ptr: Arc<RwLock<ObjType>>) -> Self;
100
101    fn new_value(obj: ObjType) -> Self;
102
103    fn ptr(&self) -> &Arc<RwLock<ObjType>>;
104
105    fn ptr_mut(&mut self) -> &mut Arc<RwLock<ObjType>>;
106
107    #[inline(always)]
108    fn read_recursive(&self) -> RwLockReadGuard<RawRwLock, ObjType> {
109        let ret = self.ptr().read_recursive();
110        ret
111    }
112
113    #[inline(always)]
114    fn write(&self) -> RwLockWriteGuard<RawRwLock, ObjType> {
115        let ret = self.ptr().write();
116        ret
117    }
118
119    fn ptr_eq(&self, other: &Self) -> bool {
120        Arc::ptr_eq(self.ptr(), other.ptr())
121    }
122}
123
/// strong shared pointer to a lock-protected object (safe-pointer mode)
pub struct ArcRwLock<T> {
    ptr: Arc<RwLock<T>>,
}

/// weak counterpart of [`ArcRwLock`]; does not keep the object alive
pub struct WeakRwLock<T> {
    ptr: Weak<RwLock<T>>,
}

132impl<T> ArcRwLock<T> {
133    pub fn downgrade(&self) -> WeakRwLock<T> {
134        WeakRwLock::<T> {
135            ptr: Arc::downgrade(&self.ptr),
136        }
137    }
138}
139
140impl<T> WeakRwLock<T> {
141    pub fn upgrade_force(&self) -> ArcRwLock<T> {
142        ArcRwLock::<T> {
143            ptr: self.ptr.upgrade().unwrap(),
144        }
145    }
146    pub fn upgrade(&self) -> Option<ArcRwLock<T>> {
147        self.ptr.upgrade().map(|x| ArcRwLock::<T> { ptr: x })
148    }
149}
150
151impl<T> Clone for ArcRwLock<T> {
152    fn clone(&self) -> Self {
153        Self::new_ptr(Arc::clone(self.ptr()))
154    }
155}
156
157impl<T> RwLockPtr<T> for ArcRwLock<T> {
158    fn new_ptr(ptr: Arc<RwLock<T>>) -> Self {
159        Self { ptr }
160    }
161    fn new_value(obj: T) -> Self {
162        Self::new_ptr(Arc::new(RwLock::new(obj)))
163    }
164    #[inline(always)]
165    fn ptr(&self) -> &Arc<RwLock<T>> {
166        &self.ptr
167    }
168    #[inline(always)]
169    fn ptr_mut(&mut self) -> &mut Arc<RwLock<T>> {
170        &mut self.ptr
171    }
172}
173
174impl<T> PartialEq for ArcRwLock<T> {
175    fn eq(&self, other: &Self) -> bool {
176        self.ptr_eq(other)
177    }
178}
179
180impl<T> Eq for ArcRwLock<T> {}
181
182impl<T> Clone for WeakRwLock<T> {
183    fn clone(&self) -> Self {
184        Self { ptr: self.ptr.clone() }
185    }
186}
187
188impl<T> PartialEq for WeakRwLock<T> {
189    fn eq(&self, other: &Self) -> bool {
190        self.ptr.ptr_eq(&other.ptr)
191    }
192}
193
194impl<T> Eq for WeakRwLock<T> {}
195
196impl<T> std::ops::Deref for ArcRwLock<T> {
197    type Target = RwLock<T>;
198    fn deref(&self) -> &Self::Target {
199        &self.ptr
200    }
201}
202
/// integration with `weak_table`: lets `WeakRwLock` serve as the weak slot type of a weak table
impl<T> weak_table::traits::WeakElement for WeakRwLock<T> {
    type Strong = ArcRwLock<T>;
    fn new(view: &Self::Strong) -> Self {
        view.downgrade()
    }
    fn view(&self) -> Option<Self::Strong> {
        self.upgrade()
    }
    /// cheap clone of the strong pointer: only bumps the reference count
    fn clone(view: &Self::Strong) -> Self::Strong {
        view.clone()
    }
}

/// strong shared pointer to a lock-protected [`FastClear`] object (safe-pointer mode)
pub struct FastClearArcRwLock<T: FastClear> {
    ptr: Arc<RwLock<T>>,
}

/// weak counterpart of [`FastClearArcRwLock`]; does not keep the object alive
pub struct FastClearWeakRwLock<T: FastClear> {
    ptr: Weak<RwLock<T>>,
}

224impl<T: FastClear> FastClearArcRwLock<T> {
225    pub fn downgrade(&self) -> FastClearWeakRwLock<T> {
226        FastClearWeakRwLock::<T> {
227            ptr: Arc::downgrade(&self.ptr),
228        }
229    }
230}
231
232impl<T: FastClear> FastClearWeakRwLock<T> {
233    pub fn upgrade_force(&self) -> FastClearArcRwLock<T> {
234        FastClearArcRwLock::<T> {
235            ptr: self.ptr.upgrade().unwrap(),
236        }
237    }
238    pub fn upgrade(&self) -> Option<FastClearArcRwLock<T>> {
239        self.ptr.upgrade().map(|x| FastClearArcRwLock::<T> { ptr: x })
240    }
241}
242
243impl<T: FastClear> Clone for FastClearArcRwLock<T> {
244    fn clone(&self) -> Self {
245        Self::new_ptr(Arc::clone(self.ptr()))
246    }
247}
248
249impl<T: FastClear> FastClearRwLockPtr<T> for FastClearArcRwLock<T> {
250    fn new_ptr(ptr: Arc<RwLock<T>>) -> Self {
251        Self { ptr }
252    }
253    fn new_value(obj: T) -> Self {
254        Self::new_ptr(Arc::new(RwLock::new(obj)))
255    }
256    #[inline(always)]
257    fn ptr(&self) -> &Arc<RwLock<T>> {
258        &self.ptr
259    }
260    #[inline(always)]
261    fn ptr_mut(&mut self) -> &mut Arc<RwLock<T>> {
262        &mut self.ptr
263    }
264}
265
266impl<T: FastClear> PartialEq for FastClearArcRwLock<T> {
267    fn eq(&self, other: &Self) -> bool {
268        self.ptr_eq(other)
269    }
270}
271
272impl<T: FastClear> Eq for FastClearArcRwLock<T> {}
273
274impl<T: FastClear> Clone for FastClearWeakRwLock<T> {
275    fn clone(&self) -> Self {
276        Self { ptr: self.ptr.clone() }
277    }
278}
279
280impl<T: FastClear> PartialEq for FastClearWeakRwLock<T> {
281    fn eq(&self, other: &Self) -> bool {
282        self.ptr.ptr_eq(&other.ptr)
283    }
284}
285
286impl<T: FastClear> Eq for FastClearWeakRwLock<T> {}
287
288impl<T: FastClear> std::ops::Deref for FastClearArcRwLock<T> {
289    type Target = RwLock<T>;
290    fn deref(&self) -> &Self::Target {
291        &self.ptr
292    }
293}
294
/// integration with `weak_table` for the fast-clear safe pointer
impl<T: FastClear> weak_table::traits::WeakElement for FastClearWeakRwLock<T> {
    type Strong = FastClearArcRwLock<T>;
    fn new(view: &Self::Strong) -> Self {
        view.downgrade()
    }
    fn view(&self) -> Option<Self::Strong> {
        self.upgrade()
    }
    /// cheap clone of the strong pointer: only bumps the reference count
    fn clone(view: &Self::Strong) -> Self::Strong {
        view.clone()
    }
}

/*
 * unsafe APIs, used for production environment where speed matters
 */

cfg_if::cfg_if! {
    if #[cfg(feature="unsafe_pointer")] {
        use std::cell::UnsafeCell;

        /// lock-free counterpart of [`FastClearRwLockPtr`]: the object lives in a plain `Arc` and
        /// writers fabricate `&mut` access via `Arc::get_mut_unchecked` (a nightly-only API);
        /// NOTE(review): soundness relies entirely on the algorithm guaranteeing exclusive access,
        /// per the module doc — the code must first be debugged with the lock-based version
        pub trait FastClearUnsafePtr<ObjType> where ObjType: FastClear {

            fn new_ptr(ptr: Arc<ObjType>) -> Self;

            fn new_value(obj: ObjType) -> Self;

            fn ptr(&self) -> &Arc<ObjType>;

            fn ptr_mut(&mut self) -> &mut Arc<ObjType>;

            /// read access with a debug-mode timestamp sanity check (no lock is taken)
            #[inline(always)]
            fn read_recursive(&self, active_timestamp: FastClearTimestamp) -> &ObjType {
                let ret = self.ptr();
                ret.debug_assert_dynamic_cleared(active_timestamp);  // only assert during debug modes
                ret
            }

            /// without sanity check: this data might be outdated, so only use when you're read those immutable fields
            #[inline(always)]
            fn read_recursive_force(&self) -> &ObjType {
                self.ptr()
            }

            /// mutable access with a debug-mode timestamp sanity check (no lock is taken)
            #[inline(always)]
            fn write(&self, active_timestamp: FastClearTimestamp) -> &mut ObjType {
                unsafe {
                    // SAFETY(review): a clone of the Arc is parked in an UnsafeCell so that
                    // `get_mut_unchecked` can hand out `&mut` into the shared allocation; the
                    // clone is dropped on return while the reference still points at the original
                    // allocation, so this is sound only under external exclusive-access discipline
                    let ptr = UnsafeCell::new(self.ptr().clone());
                    let ret = Arc::get_mut_unchecked(&mut *ptr.get());
                    ret.debug_assert_dynamic_cleared(active_timestamp);  // only assert during debug modes
                    ret
                }
            }

            /// always succeeds in this mode, mirroring the locking API's `try_write`
            #[inline(always)]
            fn try_write(&self, active_timestamp: FastClearTimestamp) -> Option<&mut ObjType> {
                Some(self.write(active_timestamp))
            }

            /// without sanity check: useful only in implementing hard_clear
            #[inline(always)]
            fn write_force(&self) -> &mut ObjType {
                unsafe {
                    // SAFETY(review): same aliasing caveat as `write` above
                    let ptr = UnsafeCell::new(self.ptr().clone());
                    Arc::get_mut_unchecked(&mut *ptr.get())
                }
            }

            /// dynamically clear it if not already cleared; it's safe to call many times (no lock is taken in this mode)
            #[inline(always)]
            fn dynamic_clear(&self, active_timestamp: FastClearTimestamp) {
                let value = self.write_force();
                value.dynamic_clear(active_timestamp);
            }

            /// identity comparison: equal iff both point to the same allocation
            fn ptr_eq(&self, other: &Self) -> bool {
                Arc::ptr_eq(self.ptr(), other.ptr())
            }

        }

        /// lock-free counterpart of [`RwLockPtr`], without fast-clear semantics
        pub trait UnsafePtr<ObjType> {

            fn new_ptr(ptr: Arc<ObjType>) -> Self;

            fn new_value(obj: ObjType) -> Self;

            fn ptr(&self) -> &Arc<ObjType>;

            fn ptr_mut(&mut self) -> &mut Arc<ObjType>;

            #[inline(always)]
            fn read_recursive(&self) -> &ObjType {
                self.ptr()
            }

            #[inline(always)]
            fn write(&self) -> &mut ObjType {
                unsafe {
                    // SAFETY(review): same aliasing caveat as FastClearUnsafePtr::write
                    let ptr = UnsafeCell::new(self.ptr().clone());
                    Arc::get_mut_unchecked(&mut *ptr.get())
                }
            }

            /// always succeeds in this mode, mirroring the locking API's `try_write`
            #[inline(always)]
            fn try_write(&self) -> Option<&mut ObjType> {
                Some(self.write())
            }

            /// identity comparison: equal iff both point to the same allocation
            fn ptr_eq(&self, other: &Self) -> bool {
                Arc::ptr_eq(self.ptr(), other.ptr())
            }

        }

        /// strong shared pointer (unsafe-pointer mode): just an `Arc`, no lock
        pub struct ArcUnsafe<T> {
            ptr: Arc<T>,
        }

        /// weak counterpart of [`ArcUnsafe`]; does not keep the object alive
        pub struct WeakUnsafe<T> {
            ptr: Weak<T>,
        }

        impl<T> ArcUnsafe<T> {
            /// create a weak reference that does not keep the object alive
            pub fn downgrade(&self) -> WeakUnsafe<T> {
                WeakUnsafe::<T> {
                    ptr: Arc::downgrade(&self.ptr)
                }
            }
        }

        impl<T> WeakUnsafe<T> {
            /// upgrade, panicking if the object has already been dropped
            pub fn upgrade_force(&self) -> ArcUnsafe<T> {
                ArcUnsafe::<T> {
                    ptr: self.ptr.upgrade().unwrap()
                }
            }
            /// upgrade, returning `None` if the object has already been dropped
            pub fn upgrade(&self) -> Option<ArcUnsafe<T>> {
                self.ptr.upgrade().map(|x| ArcUnsafe::<T> { ptr: x })
            }
        }

        impl<T> Clone for ArcUnsafe<T> {
            /// cheap clone: only bumps the reference count
            fn clone(&self) -> Self {
                Self::new_ptr(Arc::clone(self.ptr()))
            }
        }

        impl<T> UnsafePtr<T> for ArcUnsafe<T> {
            fn new_ptr(ptr: Arc<T>) -> Self { Self { ptr }  }
            fn new_value(obj: T) -> Self { Self::new_ptr(Arc::new(obj)) }
            #[inline(always)] fn ptr(&self) -> &Arc<T> { &self.ptr }
            #[inline(always)] fn ptr_mut(&mut self) -> &mut Arc<T> { &mut self.ptr }
        }

        impl<T> PartialEq for ArcUnsafe<T> {
            fn eq(&self, other: &Self) -> bool { self.ptr_eq(other) }
        }

        impl<T> Eq for ArcUnsafe<T> { }

        impl<T> Clone for WeakUnsafe<T> {
            fn clone(&self) -> Self {
                Self { ptr: self.ptr.clone() }
            }
        }

        impl<T> PartialEq for WeakUnsafe<T> {
            fn eq(&self, other: &Self) -> bool { self.ptr.ptr_eq(&other.ptr) }
        }

        impl<T> Eq for WeakUnsafe<T> { }

        impl<T> std::ops::Deref for ArcUnsafe<T> {
            type Target = T;
            fn deref(&self) -> &Self::Target {
                &self.ptr
            }
        }

        /// integration with `weak_table`: lets `WeakUnsafe` be used as the weak slot type
        impl<T> weak_table::traits::WeakElement for WeakUnsafe<T> {
            type Strong = ArcUnsafe<T>;
            fn new(view: &Self::Strong) -> Self {
                view.downgrade()
            }
            fn view(&self) -> Option<Self::Strong> {
                self.upgrade()
            }
            fn clone(view: &Self::Strong) -> Self::Strong {
                view.clone()
            }
        }

        /// strong shared pointer to a [`FastClear`] object (unsafe-pointer mode)
        pub struct FastClearArcUnsafe<T: FastClear> {
            ptr: Arc<T>,
        }

        /// weak counterpart of [`FastClearArcUnsafe`]; does not keep the object alive
        pub struct FastClearWeakUnsafe<T: FastClear> {
            ptr: Weak<T>,
        }

        impl<T: FastClear> FastClearArcUnsafe<T> {
            /// create a weak reference that does not keep the object alive
            pub fn downgrade(&self) -> FastClearWeakUnsafe<T> {
                FastClearWeakUnsafe::<T> {
                    ptr: Arc::downgrade(&self.ptr)
                }
            }
        }

        impl<T: FastClear> FastClearWeakUnsafe<T> {
            /// upgrade, panicking if the object has already been dropped
            pub fn upgrade_force(&self) -> FastClearArcUnsafe<T> {
                FastClearArcUnsafe::<T> {
                    ptr: self.ptr.upgrade().unwrap()
                }
            }
            /// upgrade, returning `None` if the object has already been dropped
            pub fn upgrade(&self) -> Option<FastClearArcUnsafe<T>> {
                self.ptr.upgrade().map(|x| FastClearArcUnsafe::<T> { ptr: x })
            }
        }

        impl<T: FastClear> Clone for FastClearArcUnsafe<T> {
            /// cheap clone: only bumps the reference count
            fn clone(&self) -> Self {
                Self::new_ptr(Arc::clone(self.ptr()))
            }
        }

        impl<T: FastClear> FastClearUnsafePtr<T> for FastClearArcUnsafe<T> {
            fn new_ptr(ptr: Arc<T>) -> Self { Self { ptr }  }
            fn new_value(obj: T) -> Self { Self::new_ptr(Arc::new(obj)) }
            #[inline(always)] fn ptr(&self) -> &Arc<T> { &self.ptr }
            #[inline(always)] fn ptr_mut(&mut self) -> &mut Arc<T> { &mut self.ptr }
        }

        impl<T: FastClear> PartialEq for FastClearArcUnsafe<T> {
            fn eq(&self, other: &Self) -> bool { self.ptr_eq(other) }
        }

        impl<T: FastClear> Eq for FastClearArcUnsafe<T> { }

        impl<T: FastClear> Clone for FastClearWeakUnsafe<T> {
            fn clone(&self) -> Self {
                Self { ptr: self.ptr.clone() }
            }
        }

        impl<T: FastClear> PartialEq for FastClearWeakUnsafe<T> {
            fn eq(&self, other: &Self) -> bool { self.ptr.ptr_eq(&other.ptr) }
        }

        impl<T: FastClear> Eq for FastClearWeakUnsafe<T> { }

        impl<T: FastClear> std::ops::Deref for FastClearArcUnsafe<T> {
            type Target = T;
            fn deref(&self) -> &Self::Target {
                &self.ptr
            }
        }

        /// integration with `weak_table` for the fast-clear unsafe pointer
        impl<T: FastClear> weak_table::traits::WeakElement for FastClearWeakUnsafe<T> {
            type Strong = FastClearArcUnsafe<T>;
            fn new(view: &Self::Strong) -> Self {
                view.downgrade()
            }
            fn view(&self) -> Option<Self::Strong> {
                self.upgrade()
            }
            fn clone(view: &Self::Strong) -> Self::Strong {
                view.clone()
            }
        }

    }

}

cfg_if::cfg_if! {
    if #[cfg(feature="dangerous_pointer")] {

        /// the most aggressive mode: "weak" pointers are bare `*const` addresses with no
        /// reference counting and no liveness tracking — keeping the owner alive is entirely
        /// the caller's responsibility
        pub trait FastClearUnsafePtrDangerous<ObjType> where ObjType: FastClear {

            fn new_ptr(ptr: Arc<ObjType>) -> Self;

            fn new_value(obj: ObjType) -> Self;

            fn ptr(&self) -> *const ObjType;

            /// read access with a debug-mode timestamp sanity check
            #[inline(always)]
            fn read_recursive(&self, active_timestamp: FastClearTimestamp) -> &ObjType {
                unsafe {
                    // SAFETY(review): dereferences the raw pointer; caller must keep the owner alive
                    let ret = &*self.ptr();
                    ret.debug_assert_dynamic_cleared(active_timestamp);  // only assert during debug modes
                    ret
                }
            }

            /// without sanity check: this data might be outdated, so only use when you're read those immutable fields
            #[inline(always)]
            fn read_recursive_force(&self) -> &ObjType {
                unsafe {
                    &*self.ptr()
                }
            }

            /// mutable access with a debug-mode timestamp sanity check
            #[inline(always)]
            fn write(&self, active_timestamp: FastClearTimestamp) -> &mut ObjType {
                unsafe {
                    // https://stackoverflow.com/questions/54237610/is-there-a-way-to-make-an-immutable-reference-mutable
                    // SAFETY(review): casts away constness; sound only with external exclusive access
                    let const_ptr = self.ptr();
                    let mut_ptr = &mut *(const_ptr as *mut ObjType);
                    mut_ptr.debug_assert_dynamic_cleared(active_timestamp);  // only assert during debug modes
                    mut_ptr
                }
            }

            /// always succeeds in this mode, mirroring the locking API's `try_write`
            #[inline(always)]
            fn try_write(&self, active_timestamp: FastClearTimestamp) -> Option<&mut ObjType> {
                Some(self.write(active_timestamp))
            }

            /// without sanity check: useful only in implementing hard_clear
            #[inline(always)]
            fn write_force(&self) -> &mut ObjType {
                unsafe {
                    // https://stackoverflow.com/questions/54237610/is-there-a-way-to-make-an-immutable-reference-mutable
                    let const_ptr = self.ptr();
                    let mut_ptr = const_ptr as *mut ObjType;
                    &mut *mut_ptr
                }
            }

            /// dynamically clear it if not already cleared; it's safe to call many times (no lock is taken in this mode)
            #[inline(always)]
            fn dynamic_clear(&self, active_timestamp: FastClearTimestamp) {
                let value = self.write_force();
                value.dynamic_clear(active_timestamp);
            }

            /// identity comparison on the raw addresses
            #[inline(always)]
            fn ptr_eq(&self, other: &Self) -> bool {
                std::ptr::eq(self.ptr(), other.ptr())
            }

        }

        /// strong pointer (dangerous mode): still an `Arc`, which owns the allocation
        pub struct FastClearArcUnsafeDangerous<T: FastClear> {
            raw_ptr: Arc<T>,
        }

        /// "weak" pointer (dangerous mode): a bare raw pointer with no liveness tracking
        pub struct FastClearWeakUnsafeDangerous<T: FastClear> {
            raw_ptr: *const T,
        }

        // SAFETY(review): these impls assert cross-thread use is safe for any T: FastClear;
        // that is only sound under the crate's external exclusive-access discipline — confirm
        unsafe impl<T: FastClear> Send for FastClearArcUnsafeDangerous<T> {}
        unsafe impl<T: FastClear> Sync for FastClearArcUnsafeDangerous<T> {}

        unsafe impl<T: FastClear> Send for FastClearWeakUnsafeDangerous<T> {}
        unsafe impl<T: FastClear> Sync for FastClearWeakUnsafeDangerous<T> {}

        impl<T: FastClear> FastClearArcUnsafeDangerous<T> {
            /// "downgrade" records the raw address only; it does not track liveness
            #[inline(always)]
            pub fn downgrade(&self) -> FastClearWeakUnsafeDangerous<T> {
                FastClearWeakUnsafeDangerous::<T> {
                    raw_ptr: Arc::as_ptr(&self.raw_ptr)
                }
            }
        }

        // NOTE(review): the following two impl blocks on the same type could be merged
        impl<T: FastClear> FastClearWeakUnsafeDangerous<T> {
            /// weak-to-weak "downgrade" is just a pointer copy
            #[inline(always)]
            pub fn downgrade(&self) -> FastClearWeakUnsafeDangerous<T> {
                FastClearWeakUnsafeDangerous::<T> {
                    raw_ptr: self.raw_ptr
                }
            }
        }

        impl<T: FastClear> FastClearWeakUnsafeDangerous<T> {
            /// "upgrade" cannot fail — and cannot detect a dangling pointer: it returns a copy
            #[inline(always)]
            pub fn upgrade_force(&self) -> FastClearWeakUnsafeDangerous<T> {
                self.clone()
            }
        }

        impl<T: FastClear> Clone for FastClearWeakUnsafeDangerous<T> {
            #[inline(always)]
            fn clone(&self) -> Self {
                Self { raw_ptr: self.raw_ptr }
            }
        }

        impl<T: FastClear> FastClearUnsafePtrDangerous<T> for FastClearArcUnsafeDangerous<T> {
            fn new_ptr(ptr: Arc<T>) -> Self { Self { raw_ptr: ptr } }
            fn new_value(obj: T) -> Self { Self { raw_ptr: Arc::new(obj) } }
            #[inline(always)]
            fn ptr(&self) -> *const T {
                Arc::as_ptr(&self.raw_ptr)
            }
        }

        impl<T: FastClear> FastClearUnsafePtrDangerous<T> for FastClearWeakUnsafeDangerous<T> {
            // constructors are meaningless for the raw-pointer type; they must never be called
            fn new_ptr(_ptr: Arc<T>) -> Self { panic!() }
            fn new_value(_obj: T) -> Self { panic!() }
            #[inline(always)]
            fn ptr(&self) -> *const T {
                self.raw_ptr
            }
        }

        impl<T: FastClear> PartialEq for FastClearArcUnsafeDangerous<T> {
            #[inline(always)]
            fn eq(&self, other: &Self) -> bool { self.ptr_eq(other) }
        }

        /// cross-type identity comparison between a weak and a strong dangerous pointer
        impl<T: FastClear> PartialEq<FastClearArcUnsafeDangerous<T>> for FastClearWeakUnsafeDangerous<T> {
            #[inline(always)]
            fn eq(&self, other: &FastClearArcUnsafeDangerous<T>) -> bool {
                self.ptr() == other.ptr()
            }
        }

        impl<T: FastClear> Eq for FastClearArcUnsafeDangerous<T> { }

        impl<T: FastClear> PartialEq for FastClearWeakUnsafeDangerous<T> {
            #[inline(always)]
            fn eq(&self, other: &Self) -> bool { std::ptr::eq(self.ptr(), other.ptr()) }
        }

        impl<T: FastClear> Eq for FastClearWeakUnsafeDangerous<T> { }

        impl<T: FastClear> std::ops::Deref for FastClearArcUnsafeDangerous<T> {
            type Target = T;
            #[inline(always)]
            fn deref(&self) -> &Self::Target {
                &self.raw_ptr
            }
        }

        /// integration with `weak_table`; note `Strong` is the weak type itself here, because
        /// the dangerous weak pointer can never observe deallocation
        impl<T: FastClear> weak_table::traits::WeakElement for FastClearWeakUnsafeDangerous<T> {
            type Strong = FastClearWeakUnsafeDangerous<T>;
            #[inline(always)]
            fn new(view: &Self::Strong) -> Self {
                view.downgrade()
            }
            #[inline(always)]
            fn view(&self) -> Option<Self::Strong> {
                Some(self.upgrade_force())
            }
            #[inline(always)]
            fn clone(view: &Self::Strong) -> Self::Strong {
                view.clone()
            }
        }

    }
}

// select the concrete pointer types according to the enabled features: safe locks by default,
// lock-free `Arc`s under `unsafe_pointer`, raw pointers additionally under `dangerous_pointer`
cfg_if::cfg_if! {
    if #[cfg(feature="unsafe_pointer")] {
        pub type FastClearArcManualSafeLock<T> = FastClearArcUnsafe<T>;
        pub type FastClearWeakManualSafeLock<T> = FastClearWeakUnsafe<T>;
        pub type ArcManualSafeLock<T> = ArcUnsafe<T>;
        pub type WeakManualSafeLock<T> = WeakUnsafe<T>;
        /// bind a variable to a write handle; no `mut` needed here because in this mode
        /// `write` already returns `&mut ObjType` rather than a guard
        #[macro_export]
        macro_rules! lock_write {
            ($variable:ident, $lock:expr) => { let $variable = $lock.write(); };
            ($variable:ident, $lock:expr, $timestamp:expr) => { let $variable = $lock.write($timestamp); };
        }
        #[allow(unused_imports)] pub use lock_write;
        cfg_if::cfg_if! {
            if #[cfg(feature="dangerous_pointer")] {
                pub type FastClearArcManualSafeLockDangerous<T> = FastClearArcUnsafeDangerous<T>;
                pub type FastClearWeakManualSafeLockDangerous<T> = FastClearWeakUnsafeDangerous<T>;
            } else {
                pub type FastClearArcManualSafeLockDangerous<T> = FastClearArcUnsafe<T>;
                pub type FastClearWeakManualSafeLockDangerous<T> = FastClearWeakUnsafe<T>;
            }
        }
    } else {
        pub type FastClearArcManualSafeLock<T> = FastClearArcRwLock<T>;
        pub type FastClearWeakManualSafeLock<T> = FastClearWeakRwLock<T>;
        pub type ArcManualSafeLock<T> = ArcRwLock<T>;
        pub type WeakManualSafeLock<T> = WeakRwLock<T>;
        /// bind a `mut` variable to a write guard; in this mode `write` returns a lock guard
        #[macro_export]
        macro_rules! lock_write {
            ($variable:ident, $lock:expr) => { let mut $variable = $lock.write(); };
            ($variable:ident, $lock:expr, $timestamp:expr) => { let mut $variable = $lock.write($timestamp); };
        }
        #[allow(unused_imports)] pub use lock_write;
        pub type FastClearArcManualSafeLockDangerous<T> = FastClearArcRwLock<T>;
        pub type FastClearWeakManualSafeLockDangerous<T> = FastClearWeakRwLock<T>;
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    /// minimal payload type for exercising the pointer wrappers
    #[derive(Debug)]
    struct Tester {
        idx: usize,
    }

    type TesterPtr = ArcRwLock<Tester>;
    type TesterWeak = WeakRwLock<Tester>;

    impl std::fmt::Debug for TesterPtr {
        fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
            let value = self.read_recursive();
            write!(f, "{:?}", value)
        }
    }

    impl std::fmt::Debug for TesterWeak {
        fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
            self.upgrade_force().fmt(f)
        }
    }

    /// writes through the strong and the weak handle must be visible through the other
    #[test]
    fn pointers_test_1() {
        // cargo test pointers_test_1 -- --nocapture
        let ptr = TesterPtr::new_value(Tester { idx: 0 });
        let weak = ptr.downgrade();
        ptr.write().idx = 1;
        assert_eq!(weak.upgrade_force().read_recursive().idx, 1);
        weak.upgrade_force().write().idx = 2;
        assert_eq!(ptr.read_recursive().idx, 2);
    }

    cfg_if::cfg_if! {
        if #[cfg(feature="unsafe_pointer")] {

            // bug fix: this alias used to be `ArcRwLock<Tester>`, so pointers_test_2 re-tested
            // the safe lock instead of exercising the unsafe pointer implementation
            type TesterUnsafePtr = ArcUnsafe<Tester>;

            /// same read/write-visibility checks as pointers_test_1, but on the lock-free pointer
            #[test]
            fn pointers_test_2() {  // cargo test pointers_test_2 --features unsafe_pointer -- --nocapture
                let ptr = TesterUnsafePtr::new_value(Tester { idx: 0 });
                let weak = ptr.downgrade();
                ptr.write().idx = 1;
                assert_eq!(weak.upgrade_force().read_recursive().idx, 1);
                weak.upgrade_force().write().idx = 2;
                assert_eq!(ptr.read_recursive().idx, 2);
            }

            /// prints pointer sizes for manual inspection; has no assertions
            #[test]
            fn pointers_test_3() {  // cargo test pointers_test_3 --features dangerous_pointer -- --nocapture
                // NOTE(review): this test is gated on `unsafe_pointer`; the command above only
                // works if `dangerous_pointer` transitively enables `unsafe_pointer` — confirm
                println!("{}", std::mem::size_of::<ArcManualSafeLock<Tester>>());
                println!("{}", std::mem::size_of::<Arc<Tester>>());
                println!("{}", std::mem::size_of::<*const Tester>());
            }

        }
    }
}