1use parking_lot::{lock_api::RawMutex as _, RawMutex};
2use std::{
3 borrow::Borrow,
4 cell::UnsafeCell,
5 fmt,
6 hash::{Hash, Hasher},
7 ops,
8 ptr::{self, NonNull},
9 slice,
10 sync::Arc,
11};
12
/// A shared reference to an interned value.
///
/// The second field is private, so values can only be constructed inside this
/// crate (presumably by the interner). `PartialEq`/`Eq` below compare by
/// address, which is correct only under the interner's uniqueness invariant.
pub struct Interned<'a, T: ?Sized>(pub &'a T, Prv);
16
17impl<'a, T: ?Sized> Interned<'a, T> {
18 pub fn raw(&self) -> RawInterned<T> {
19 let ptr = unsafe { NonNull::new_unchecked(self.0 as *const T as *mut T) };
21 RawInterned(ptr)
22 }
23
24 pub fn erased_raw(&self) -> RawInterned {
25 let ptr = unsafe { NonNull::new_unchecked(self.0 as *const T as *mut T) };
26 RawInterned(ptr.cast::<Prv>())
27 }
28
29 pub(crate) fn unique(value: &'a T) -> Self {
31 Self(value, Prv)
32 }
33}
34
35impl<'a, T: ?Sized> Interned<'a, T> {
36 pub unsafe fn from_raw(raw: RawInterned<T>) -> Self {
40 let ref_ = unsafe { raw.0.as_ref() };
41 Self(ref_, Prv)
42 }
43}
44
45impl<'a, T> Interned<'a, T> {
46 pub unsafe fn from_erased_raw(raw: RawInterned) -> Self {
51 let ref_ = unsafe { raw.0.cast::<T>().as_ref() };
52 Self(ref_, Prv)
53 }
54}
55
56impl<T: ?Sized> PartialEq for Interned<'_, T> {
58 fn eq(&self, other: &Self) -> bool {
59 (self.0 as *const T as *const ()) == (other.0 as *const T as *const ())
60 }
61}
62
// Address equality (see `PartialEq`) is reflexive, so `Eq` holds without `T: Eq`.
impl<T: ?Sized> Eq for Interned<'_, T> {}
65
66impl<T: PartialOrd + ?Sized> PartialOrd for Interned<'_, T> {
67 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
68 self.0.partial_cmp(other.0)
69 }
70}
71
72impl<T: Ord + ?Sized> Ord for Interned<'_, T> {
73 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
74 self.0.cmp(other.0)
75 }
76}
77
78impl<T: Hash + ?Sized> Hash for Interned<'_, T> {
79 fn hash<H: Hasher>(&self, state: &mut H) {
80 self.0.hash(state)
81 }
82}
83
84impl<T: ?Sized> Borrow<T> for Interned<'_, T> {
85 fn borrow(&self) -> &T {
86 self.0
87 }
88}
89
90impl<T: ?Sized> AsRef<T> for Interned<'_, T> {
91 fn as_ref(&self) -> &T {
92 self.0
93 }
94}
95
96impl<'a, T: ?Sized> ops::Deref for Interned<'a, T> {
97 type Target = T;
98
99 fn deref(&self) -> &Self::Target {
100 self.0
101 }
102}
103
104impl<T: ?Sized> Clone for Interned<'_, T> {
105 fn clone(&self) -> Self {
106 *self
107 }
108}
109
// Just a reference plus a zero-sized marker, so it is freely copyable.
impl<T: ?Sized> Copy for Interned<'_, T> {}
111
112impl<T: fmt::Debug + ?Sized> fmt::Debug for Interned<'_, T> {
113 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
114 fmt::Debug::fmt(&self.0, f)
115 }
116}
117
118impl<T: fmt::Display + ?Sized> fmt::Display for Interned<'_, T> {
119 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
120 fmt::Display::fmt(&self.0, f)
121 }
122}
123
/// A raw, pointer-only handle to an interned value; the default parameter
/// `Prv` is the type-erased form. Comparison and ordering are by (thin)
/// address; converting back to `Interned` is `unsafe`.
pub struct RawInterned<T: ?Sized = Prv>(pub(crate) NonNull<T>);
138
139impl<T: ?Sized> RawInterned<T> {
140 #[inline]
141 pub fn cast<U>(self) -> RawInterned<U> {
142 RawInterned(self.0.cast())
143 }
144
145 #[inline]
146 pub fn erase(self) -> RawInterned {
147 RawInterned(self.0.cast())
148 }
149}
150
151impl<T: ?Sized> PartialEq for RawInterned<T> {
153 fn eq(&self, other: &Self) -> bool {
154 (self.as_ptr() as *mut ()) == (other.as_ptr() as *mut ())
155 }
156}
157
// Address equality (see `PartialEq`) is reflexive, so `Eq` is sound.
impl<T: ?Sized> Eq for RawInterned<T> {}
160
161impl<T: ?Sized> PartialOrd for RawInterned<T> {
163 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
164 Some(self.cmp(other))
165 }
166}
167
168impl<T: ?Sized> Ord for RawInterned<T> {
170 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
171 self.0
172 .as_ptr()
173 .cast::<()>()
174 .cmp(&other.0.as_ptr().cast::<()>())
175 }
176}
177
178impl<T: ?Sized> Hash for RawInterned<T> {
179 fn hash<H: Hasher>(&self, state: &mut H) {
180 self.0.hash(state)
181 }
182}
183
// Allows collections keyed by `NonNull<T>` to be probed with a `RawInterned`.
// NOTE(review): `Borrow` expects Eq/Ord/Hash to agree with the borrowed
// type's; `RawInterned` compares thin addresses while `NonNull` on fat
// pointers also compares metadata — confirm keys here are only ever thin, or
// that the distinction cannot matter at the use sites.
impl<T: ?Sized> Borrow<NonNull<T>> for RawInterned<T> {
    fn borrow(&self) -> &NonNull<T> {
        &self.0
    }
}
189
// Deref-to-`NonNull` exposes all of `NonNull`'s methods on `RawInterned`
// (e.g. the `as_ptr` calls used by `PartialEq` above).
impl<T: ?Sized> ops::Deref for RawInterned<T> {
    type Target = NonNull<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
197
198impl<T: ?Sized> Clone for RawInterned<T> {
199 fn clone(&self) -> Self {
200 *self
201 }
202}
203
// Just a pointer, so it is freely copyable.
impl<T: ?Sized> Copy for RawInterned<T> {}
205
206impl<T: ?Sized> fmt::Debug for RawInterned<T> {
207 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
208 self.0.fmt(f)
209 }
210}
211
/// Zero-sized marker type used both as the private-construction guard field
/// of `Interned` and as the default "type-erased" parameter of `RawInterned`.
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct Prv;
214
/// An `Arc`-shared mutex with a manual, `unsafe` lock/unlock API.
///
/// Unlike `std::sync::Mutex` there is no guard type: callers invoke `lock`
/// and `unlock` explicitly. Cloning shares the same underlying `ManualMutex`.
#[derive(Clone)]
pub struct UnsafeLock<T: ?Sized> {
    inner: Arc<ManualMutex<T>>,
}
219
// SAFETY(review): these impls are unconditional — they do NOT require
// `T: Send`/`T: Sync`. Soundness therefore rests entirely on the caller
// contract of the `unsafe fn new` below: confirm callers never access a
// non-`Send` payload across threads outside the lock/unlock discipline.
unsafe impl<T: ?Sized> Send for UnsafeLock<T> {}
unsafe impl<T: ?Sized> Sync for UnsafeLock<T> {}
240
241impl<T> UnsafeLock<T> {
242 pub unsafe fn new(value: T) -> Self {
248 Self {
249 inner: Arc::new(ManualMutex {
250 mutex: RawMutex::INIT,
251 data: UnsafeCell::new(value),
252 }),
253 }
254 }
255}
256
impl<T: ?Sized> UnsafeLock<T> {
    /// Acquires the underlying mutex and returns a raw pointer to the data.
    ///
    /// # Safety
    /// Every `lock` must be paired with exactly one [`UnsafeLock::unlock`] on
    /// the same lock, and the returned pointer must not be used after the
    /// matching `unlock`.
    pub unsafe fn lock(&self) -> NonNull<T> {
        // Blocks until the raw mutex is acquired.
        self.inner.mutex.lock();
        // SAFETY: `UnsafeCell::get` returns a valid, non-null pointer to the
        // cell's contents, so `new_unchecked` is sound.
        unsafe { NonNull::new_unchecked(self.inner.data.get()) }
    }

    /// Releases the underlying mutex.
    ///
    /// # Safety
    /// Must only be called after a matching [`UnsafeLock::lock`] in the same
    /// context; `parking_lot`'s `RawMutex::unlock` requires the lock to be
    /// held when it is called.
    pub unsafe fn unlock(&self) {
        self.inner.mutex.unlock();
    }
}
276
277impl<T: fmt::Debug> fmt::Debug for UnsafeLock<T> {
278 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
279 unsafe {
281 let t = self.lock().as_ref();
282 let ret = fmt::Debug::fmt(t, f);
283 self.unlock();
284 ret
285 }
286 }
287}
288
/// Heap-allocated state behind `UnsafeLock`: a raw `parking_lot` mutex
/// guarding an `UnsafeCell`; locking is driven manually by `UnsafeLock`.
struct ManualMutex<T: ?Sized> {
    mutex: RawMutex,
    data: UnsafeCell<T>,
}
293
// SAFETY: mirrors `std::sync::Mutex` — the mutex serializes access, so both
// sending and sharing only require `T: Send` (no `T: Sync` needed because
// access through the mutex is exclusive).
unsafe impl<T: Send + ?Sized> Send for ManualMutex<T> {}
unsafe impl<T: Send + ?Sized> Sync for ManualMutex<T> {}
296
/// Drops `num_elems` values of type `T` stored contiguously at `ptr`.
///
/// Used by type-erased storage to run destructors for a buffer whose element
/// type is only known at the call site.
///
/// # Safety
/// - `ptr` must be non-null, aligned for `T`, and point to `num_elems`
///   initialized values of `T`.
/// - The values must not be accessed or dropped again after this call.
pub(crate) unsafe fn cast_then_drop_slice<T>(ptr: *mut u8, num_elems: usize) {
    unsafe {
        // Build a raw `*mut [T]` rather than a `&mut [T]`: `drop_in_place`
        // accepts the raw slice pointer directly, so we never materialize a
        // reference (avoiding the stricter validity rules references carry).
        let elems: *mut [T] = ptr::slice_from_raw_parts_mut(ptr.cast::<T>(), num_elems);
        ptr::drop_in_place(elems);
    }
}
303
/// Test helper: asserts that handles within each group share one address and
/// that the groups' representative (first) handles are pairwise distinct.
/// Panics (index out of bounds) if any group is empty, same as before.
#[cfg(test)]
pub(crate) fn assert_group_addr_eq(groups: &[&[RawInterned]]) {
    for (idx, group) in groups.iter().enumerate() {
        // Every adjacent pair inside a group must compare equal (by address).
        for pair in group.windows(2) {
            assert_eq!(pair[0], pair[1]);
        }

        // The first handle of this group must differ from every later group's.
        let representative = group[0];
        for later in &groups[idx + 1..] {
            assert_ne!(representative, later[0]);
        }
    }
}
320
#[cfg(test)]
mod tests {
    use super::*;
    use std::{cell::Cell, rc::Rc, thread};

    // Stress test: hammer one `UnsafeLock` from every available core and
    // check that no increments are lost. The payload is an `Rc<Cell<_>>`
    // (deliberately `!Send`): the spawned threads never clone the `Rc`, they
    // only mutate the inner `Cell` while holding the lock — exactly the
    // caller contract the unconditional `Send`/`Sync` impls depend on.
    #[test]
    #[allow(unused_mut)]
    fn test_unsafelock() {
        // Skip the test entirely if the parallelism query fails.
        let Ok(num_cpus) = thread::available_parallelism() else {
            return;
        };
        let mut num_threads = num_cpus.get();

        let val = Rc::new(Cell::new(0_usize));

        // SAFETY: the Rc clone stored in the lock is only accessed between
        // lock()/unlock() pairs; its refcount is never touched off-thread.
        let lock = unsafe { UnsafeLock::new(val.clone()) };

        // Increments performed by each thread.
        const N: usize = 10_000;
        let mut handles = Vec::new();
        for _ in 0..num_threads {
            let c_lock = lock.clone();
            let handle = thread::spawn(move || {
                for _ in 0..N {
                    unsafe {
                        let val = c_lock.lock().as_mut();
                        val.set(val.get() + 1);
                        c_lock.unlock();
                    }
                }
            });
            handles.push(handle);
        }

        for handle in handles {
            handle.join().unwrap();
        }

        unsafe {
            let val = lock.lock().as_ref();
            // Every increment must be observed: N per thread, no lost updates.
            assert_eq!(val.get(), N * num_threads);
            lock.unlock();
        }
    }
}