// rustpython_common/lock/cell_lock.rs
use core::{cell::Cell, num::NonZero};

use lock_api::{
    GetThreadId, RawMutex, RawRwLock, RawRwLockDowngrade, RawRwLockRecursive, RawRwLockUpgrade,
    RawRwLockUpgradeDowngrade,
};

/// A single-threaded "mutex": the held/free flag lives in a plain `Cell<bool>`,
/// so no atomics are involved. Re-locking while already held panics (see
/// `deadlock` below) instead of blocking, since there is no other thread that
/// could ever release the lock.
pub struct RawCellMutex {
    locked: Cell<bool>,
}

unsafe impl RawMutex for RawCellMutex {
    #[allow(
        clippy::declare_interior_mutable_const,
        reason = "const lock initializer intentionally uses interior mutability"
    )]
    const INIT: Self = Self {
        locked: Cell::new(false),
    };

    // The guard must never be sent to another thread; this lock is only sound
    // single-threaded.
    type GuardMarker = lock_api::GuardNoSend;

    #[inline]
    fn lock(&self) {
        if self.is_locked() {
            deadlock("", "Mutex")
        }
        self.locked.set(true)
    }

    #[inline]
    fn try_lock(&self) -> bool {
        if self.is_locked() {
            false
        } else {
            self.locked.set(true);
            true
        }
    }

    unsafe fn unlock(&self) {
        self.locked.set(false)
    }

    #[inline]
    fn is_locked(&self) -> bool {
        self.locked.get()
    }
}
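
// Illustrative only: this raw lock is normally consumed through the `lock_api`
// wrapper types, e.g. (the `CellMutex` alias is hypothetical, not defined here):
//
//     type CellMutex<T> = lock_api::Mutex<RawCellMutex, T>;
//     let m = CellMutex::new(5);
//     assert_eq!(*m.lock(), 5); // a second lock() while the guard lives would panic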

const WRITER_BIT: usize = 0b01;
const ONE_READER: usize = 0b10;

/// A single-threaded "rwlock". The entire lock state is one `Cell<usize>`:
/// `WRITER_BIT` marks an exclusive lock, and the remaining bits count shared
/// readers in units of `ONE_READER`.
pub struct RawCellRwLock {
    state: Cell<usize>,
}

impl RawCellRwLock {
    #[inline]
    fn is_exclusive(&self) -> bool {
        self.state.get() & WRITER_BIT != 0
    }
}

unsafe impl RawRwLock for RawCellRwLock {
    #[allow(
        clippy::declare_interior_mutable_const,
        reason = "const rwlock initializer intentionally uses interior mutability"
    )]
    const INIT: Self = Self {
        state: Cell::new(0),
    };

    type GuardMarker = <RawCellMutex as RawMutex>::GuardMarker;

    #[inline]
    fn lock_shared(&self) {
        if !self.try_lock_shared() {
            deadlock("sharedly ", "RwLock")
        }
    }

    #[inline]
    fn try_lock_shared(&self) -> bool {
        // Shared locks are always recursive in a single-threaded lock, so just
        // defer to the recursive implementation.
        self.try_lock_shared_recursive()
    }

    #[inline]
    unsafe fn unlock_shared(&self) {
        self.state.update(|x| x - ONE_READER)
    }

    #[inline]
    fn lock_exclusive(&self) {
        if !self.try_lock_exclusive() {
            deadlock("exclusively ", "RwLock")
        }
        self.state.set(WRITER_BIT)
    }

    #[inline]
    fn try_lock_exclusive(&self) -> bool {
        if self.is_locked() {
            false
        } else {
            self.state.set(WRITER_BIT);
            true
        }
    }

    unsafe fn unlock_exclusive(&self) {
        self.state.set(0)
    }

    fn is_locked(&self) -> bool {
        self.state.get() != 0
    }
}
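
// Worked example of the encoding: one shared guard leaves `state == 0b10`
// (ONE_READER), a second raises it to 0b100, and dropping each subtracts
// ONE_READER back toward 0. An exclusive guard sets `state == 0b01`
// (WRITER_BIT), which is why `is_locked` can simply test for non-zero.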

unsafe impl RawRwLockDowngrade for RawCellRwLock {
    unsafe fn downgrade(&self) {
        // Exclusive -> shared: the writer becomes the one and only reader.
        self.state.set(ONE_READER);
    }
}

unsafe impl RawRwLockUpgrade for RawCellRwLock {
    #[inline]
    fn lock_upgradable(&self) {
        if !self.try_lock_upgradable() {
            deadlock("upgradably+sharedly ", "RwLock")
        }
    }

    #[inline]
    fn try_lock_upgradable(&self) -> bool {
        // An upgradable lock is just a shared lock here; whether it can
        // actually upgrade is decided later, in `try_upgrade`.
        self.try_lock_shared()
    }

    #[inline]
    unsafe fn unlock_upgradable(&self) {
        unsafe { self.unlock_shared() }
    }

    #[inline]
    unsafe fn upgrade(&self) {
        if !unsafe { self.try_upgrade() } {
            deadlock("upgrade ", "RwLock")
        }
    }

    #[inline]
    unsafe fn try_upgrade(&self) -> bool {
        // Upgrading is only sound if the upgradable guard is the sole reader.
        if self.state.get() == ONE_READER {
            self.state.set(WRITER_BIT);
            true
        } else {
            false
        }
    }
}
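
// Sketch of the upgrade path through the `lock_api` wrappers (illustrative;
// the `CellRwLock` alias is hypothetical, not defined by this module):
//
//     type CellRwLock<T> = lock_api::RwLock<RawCellRwLock, T>;
//     let l = CellRwLock::new(0);
//     let up = l.upgradable_read(); // counts as a single reader
//     let mut w = lock_api::RwLockUpgradableReadGuard::upgrade(up);
//     *w += 1; // succeeded because the upgradable guard was the sole reader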

unsafe impl RawRwLockUpgradeDowngrade for RawCellRwLock {
    #[inline]
    unsafe fn downgrade_upgradable(&self) {
        // No-op: an upgradable lock is already just a shared lock here.
    }

    #[inline]
    unsafe fn downgrade_to_upgradable(&self) {
        self.state.set(ONE_READER);
    }
}

unsafe impl RawRwLockRecursive for RawCellRwLock {
    #[inline]
    fn lock_shared_recursive(&self) {
        if !self.try_lock_shared_recursive() {
            deadlock("recursively+sharedly ", "RwLock")
        }
    }

    #[inline]
    fn try_lock_shared_recursive(&self) -> bool {
        if self.is_exclusive() {
            false
        } else if let Some(new) = self.state.get().checked_add(ONE_READER) {
            // `checked_add` refuses (rather than wraps) if the reader count
            // would overflow.
            self.state.set(new);
            true
        } else {
            false
        }
    }
}
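
// Illustrative recursive use (same hypothetical alias as above):
//
//     let l = CellRwLock::new(());
//     let a = l.read();
//     let b = l.read(); // fine: shared locks nest
//     assert!(l.try_write().is_none()); // but writers are excluded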

#[cold]
#[inline(never)]
fn deadlock(lock_kind: &str, ty: &str) -> ! {
    panic!("deadlock: tried to {lock_kind}lock a Cell{ty} twice")
}

#[derive(Clone, Copy)]
pub struct SingleThreadId(());

unsafe impl GetThreadId for SingleThreadId {
    const INIT: Self = Self(());

    fn nonzero_thread_id(&self) -> NonZero<usize> {
        // There is only ever one thread, so the "thread id" is always 1.
        // SAFETY: 1 is non-zero.
        unsafe { NonZero::new_unchecked(1) }
    }
}
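
// Minimal sanity-check sketches, assuming the locks are consumed through the
// `lock_api` wrapper types. The `CellMutex`/`CellRwLock` aliases are local to
// these tests, not part of the crate's public API.
#[cfg(test)]
mod tests {
    use super::*;

    type CellMutex<T> = lock_api::Mutex<RawCellMutex, T>;
    type CellRwLock<T> = lock_api::RwLock<RawCellRwLock, T>;

    #[test]
    fn mutex_try_lock_reports_contention() {
        let m = CellMutex::new(0);
        let guard = m.lock();
        // Already held: a second attempt must fail rather than block.
        assert!(m.try_lock().is_none());
        drop(guard);
        *m.lock() += 1;
        assert_eq!(*m.lock(), 1);
    }

    #[test]
    fn rwlock_readers_nest_but_exclude_writers() {
        let l = CellRwLock::new(0);
        let r1 = l.read();
        let r2 = l.read(); // recursive shared locking
        assert!(l.try_write().is_none());
        drop((r1, r2));
        *l.write() += 1;
        assert_eq!(*l.read(), 1);
    }

    #[test]
    fn upgrade_succeeds_for_sole_reader() {
        let l = CellRwLock::new(0);
        let up = l.upgradable_read();
        let mut w = lock_api::RwLockUpgradableReadGuard::upgrade(up);
        *w += 1;
        drop(w);
        assert_eq!(*l.read(), 1);
    }
}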