rustpython_common/refcount.rs

use crate::atomic::{Ordering::*, PyAtomic, Radium};

// A soft limit on the number of references, following the same strategy as
// std's Arc: hitting it aborts the process instead of risking an overflow.
const MAX_REFCOUNT: usize = isize::MAX as usize;

pub struct RefCount {
    strong: PyAtomic<usize>,
}

impl Default for RefCount {
    fn default() -> Self {
        Self::new()
    }
}

impl RefCount {
    // Masks off the high "leaked" bit so only the live count is overflow-checked.
    const MASK: usize = MAX_REFCOUNT;

    // A fresh count starts at 1, representing the creating reference.
    pub fn new() -> Self {
        RefCount {
            strong: Radium::new(1),
        }
    }

    #[inline]
    pub fn get(&self) -> usize {
        self.strong.load(SeqCst)
    }

    // Increment with a relaxed add; abort if the count has reached MAX_REFCOUNT.
    #[inline]
    pub fn inc(&self) {
        let old_size = self.strong.fetch_add(1, Relaxed);

        if old_size & Self::MASK == Self::MASK {
            std::process::abort();
        }
    }

    // Increment only while the count is still non-zero, i.e. the object is not
    // already being destroyed. Returns true if the increment succeeded.
    #[inline]
    pub fn safe_inc(&self) -> bool {
        self.strong
            .fetch_update(AcqRel, Acquire, |prev| (prev != 0).then_some(prev + 1))
            .is_ok()
    }

    // Decrement the count. Returns true only when this was the last reference;
    // the Acquire fence then synchronizes with earlier Release decrements
    // before the caller drops the object.
    #[inline]
    pub fn dec(&self) -> bool {
        if self.strong.fetch_sub(1, Release) != 1 {
            return false;
        }

        PyAtomic::<usize>::fence(Acquire);

        true
    }
}

impl RefCount {
    // Mark the object as leaked (immortal) by setting the sign bit of the count.
    // With that bit set the count can no longer drop to 1, so dec() never
    // reports the object as dead.
    pub fn leak(&self) {
        debug_assert!(!self.is_leaked());
        const BIT_MARKER: usize = (isize::MAX as usize) + 1;
        debug_assert_eq!(BIT_MARKER.count_ones(), 1);
        debug_assert_eq!(BIT_MARKER.leading_zeros(), 0);
        self.strong.fetch_add(BIT_MARKER, Relaxed);
    }

    pub fn is_leaked(&self) -> bool {
        (self.strong.load(Acquire) as isize) < 0
    }
}