// rustpython_common/lock/cell_lock.rs
use lock_api::{
2 GetThreadId, RawMutex, RawRwLock, RawRwLockDowngrade, RawRwLockRecursive, RawRwLockUpgrade,
3 RawRwLockUpgradeDowngrade,
4};
5use std::{cell::Cell, num::NonZeroUsize};
6
/// A `RawMutex` for single-threaded use: the lock state is a plain
/// `Cell<bool>` rather than an atomic, so acquiring it is just a flag flip
/// and re-locking is detected as a guaranteed deadlock (see the impl below).
pub struct RawCellMutex {
    /// `true` while the mutex is held.
    locked: Cell<bool>,
}
10
11unsafe impl RawMutex for RawCellMutex {
12 #[allow(clippy::declare_interior_mutable_const)]
13 const INIT: Self = RawCellMutex {
14 locked: Cell::new(false),
15 };
16
17 type GuardMarker = lock_api::GuardNoSend;
18
19 #[inline]
20 fn lock(&self) {
21 if self.is_locked() {
22 deadlock("", "Mutex")
23 }
24 self.locked.set(true)
25 }
26
27 #[inline]
28 fn try_lock(&self) -> bool {
29 if self.is_locked() {
30 false
31 } else {
32 self.locked.set(true);
33 true
34 }
35 }
36
37 unsafe fn unlock(&self) {
38 self.locked.set(false)
39 }
40
41 #[inline]
42 fn is_locked(&self) -> bool {
43 self.locked.get()
44 }
45}
46
/// Bit in `RawCellRwLock::state` that is set while a writer holds the lock.
const WRITER_BIT: usize = 0b01;
/// Increment added to `RawCellRwLock::state` for each shared reader.
const ONE_READER: usize = 0b10;
49
/// A `RawRwLock` for single-threaded use. `state` packs the writer flag
/// (`WRITER_BIT`) and the shared-reader count (in `ONE_READER` increments)
/// into a single non-atomic word.
pub struct RawCellRwLock {
    state: Cell<usize>,
}
53
54impl RawCellRwLock {
55 #[inline]
56 fn is_exclusive(&self) -> bool {
57 self.state.get() & WRITER_BIT != 0
58 }
59}
60
61unsafe impl RawRwLock for RawCellRwLock {
62 #[allow(clippy::declare_interior_mutable_const)]
63 const INIT: Self = RawCellRwLock {
64 state: Cell::new(0),
65 };
66
67 type GuardMarker = <RawCellMutex as RawMutex>::GuardMarker;
68
69 #[inline]
70 fn lock_shared(&self) {
71 if !self.try_lock_shared() {
72 deadlock("sharedly ", "RwLock")
73 }
74 }
75
76 #[inline]
77 fn try_lock_shared(&self) -> bool {
78 self.try_lock_shared_recursive()
87 }
88
89 #[inline]
90 unsafe fn unlock_shared(&self) {
91 self.state.set(self.state.get() - ONE_READER)
92 }
93
94 #[inline]
95 fn lock_exclusive(&self) {
96 if !self.try_lock_exclusive() {
97 deadlock("exclusively ", "RwLock")
98 }
99 self.state.set(WRITER_BIT)
100 }
101
102 #[inline]
103 fn try_lock_exclusive(&self) -> bool {
104 if self.is_locked() {
105 false
106 } else {
107 self.state.set(WRITER_BIT);
108 true
109 }
110 }
111
112 unsafe fn unlock_exclusive(&self) {
113 self.state.set(0)
114 }
115
116 fn is_locked(&self) -> bool {
117 self.state.get() != 0
118 }
119}
120
unsafe impl RawRwLockDowngrade for RawCellRwLock {
    /// Convert a held exclusive lock into a shared lock.
    ///
    /// # Safety
    /// Caller must hold the exclusive lock.
    unsafe fn downgrade(&self) {
        // The writer bit is replaced by exactly one reader.
        self.state.set(ONE_READER);
    }
}
126
127unsafe impl RawRwLockUpgrade for RawCellRwLock {
128 #[inline]
129 fn lock_upgradable(&self) {
130 if !self.try_lock_upgradable() {
131 deadlock("upgradably+sharedly ", "RwLock")
132 }
133 }
134
135 #[inline]
136 fn try_lock_upgradable(&self) -> bool {
137 self.try_lock_shared()
139 }
140
141 #[inline]
142 unsafe fn unlock_upgradable(&self) {
143 self.unlock_shared()
144 }
145
146 #[inline]
147 unsafe fn upgrade(&self) {
148 if !self.try_upgrade() {
149 deadlock("upgrade ", "RwLock")
150 }
151 }
152
153 #[inline]
154 unsafe fn try_upgrade(&self) -> bool {
155 if self.state.get() == ONE_READER {
156 self.state.set(WRITER_BIT);
157 true
158 } else {
159 false
160 }
161 }
162}
163
unsafe impl RawRwLockUpgradeDowngrade for RawCellRwLock {
    /// Downgrade an upgradable lock to a plain shared lock.
    #[inline]
    unsafe fn downgrade_upgradable(&self) {
        // Intentionally a no-op: upgradable and shared locks have the same
        // representation in this single-threaded implementation.
    }

    /// Downgrade a held exclusive lock to an upgradable (shared) lock.
    #[inline]
    unsafe fn downgrade_to_upgradable(&self) {
        // The writer bit becomes a single reader increment.
        self.state.set(ONE_READER);
    }
}
175
176unsafe impl RawRwLockRecursive for RawCellRwLock {
177 #[inline]
178 fn lock_shared_recursive(&self) {
179 if !self.try_lock_shared_recursive() {
180 deadlock("recursively+sharedly ", "RwLock")
181 }
182 }
183
184 #[inline]
185 fn try_lock_shared_recursive(&self) -> bool {
186 if self.is_exclusive() {
187 false
188 } else if let Some(new) = self.state.get().checked_add(ONE_READER) {
189 self.state.set(new);
190 true
191 } else {
192 false
193 }
194 }
195}
196
#[cold]
#[inline(never)]
/// Report a guaranteed single-thread deadlock (re-entrant lock attempt) by
/// panicking with a message naming the lock flavor and acquisition mode.
fn deadlock(lock_kind: &str, ty: &str) -> ! {
    panic!("deadlock: tried to {}lock a Cell{} twice", lock_kind, ty)
}
202
/// Zero-sized `GetThreadId` provider for single-threaded builds: every
/// caller reports the same (constant) thread id.
pub struct SingleThreadId(());
unsafe impl GetThreadId for SingleThreadId {
    const INIT: Self = SingleThreadId(());
    fn nonzero_thread_id(&self) -> NonZeroUsize {
        // Only one thread exists, so a constant id suffices; 1 is trivially
        // non-zero, so this unwrap can never fail.
        NonZeroUsize::new(1).unwrap()
    }
}
209}