use parking_lot::{RawMutex, lock_api::RawMutex as _};
use std::{
    borrow::Borrow,
    cell::UnsafeCell,
    fmt,
    hash::{Hash, Hasher},
    ops,
    ptr::{self, NonNull},
    slice,
    sync::Arc,
};

/// A shared reference to an interned value.
///
/// Interning guarantees one allocation per distinct value, so the
/// `PartialEq` impl below can compare addresses alone.
pub struct Interned<'a, T: ?Sized>(pub &'a T, Prv);

impl<'a, T: ?Sized> Interned<'a, T> {
    /// Returns a typed raw handle to the interned allocation.
    pub fn raw(&self) -> RawInterned<T> {
        let ptr = NonNull::from_ref(self.0);
        RawInterned(ptr)
    }

    /// Returns a type-erased raw handle to the interned allocation.
    pub fn erased_raw(&self) -> RawInterned {
        let ptr = NonNull::from_ref(self.0).cast::<Prv>();
        RawInterned(ptr)
    }

    /// Crate-internal constructor: wraps a reference that the interner
    /// guarantees is the unique allocation for its value.
    pub(crate) fn unique(value: &'a T) -> Self {
        Self(value, Prv)
    }
}

impl<'a, T: ?Sized> Interned<'a, T> {
    /// Reconstructs an [`Interned`] from a typed raw handle.
    ///
    /// # Safety
    ///
    /// `raw` must come from [`Interned::raw`], and the underlying
    /// allocation must stay live and unmoved for the lifetime `'a`.
    pub unsafe fn from_raw(raw: RawInterned<T>) -> Self {
        // SAFETY: the caller guarantees the pointer is valid for `'a`.
        let ref_ = unsafe { raw.0.as_ref() };
        Self(ref_, Prv)
    }
}

impl<'a, T> Interned<'a, T> {
    /// Reconstructs an [`Interned`] from a type-erased raw handle.
    ///
    /// # Safety
    ///
    /// As for [`Interned::from_raw`], and `raw` must have been erased from
    /// a `RawInterned<T>` with exactly this `T`.
    pub unsafe fn from_erased_raw(raw: RawInterned) -> Self {
        // SAFETY: the caller guarantees the erased pointer is really a `T`
        // valid for `'a`.
        let ref_ = unsafe { raw.0.cast::<T>().as_ref() };
        Self(ref_, Prv)
    }
}
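
// A minimal, test-only round-trip sketch (not from the original module):
// lowers an `Interned` to both raw forms and recovers it, relying only on
// the constructors above.
#[cfg(test)]
mod interned_raw_tests {
    use super::*;

    #[test]
    fn raw_roundtrip_preserves_identity() {
        let value = 42u32;
        let interned = Interned::unique(&value);

        // Typed raw handle: address equality makes the copies compare equal.
        let back = unsafe { Interned::from_raw(interned.raw()) };
        assert_eq!(interned, back);

        // Erased raw handle: recovered with the same `T` it was erased from.
        let back = unsafe { Interned::<u32>::from_erased_raw(interned.erased_raw()) };
        assert_eq!(interned, back);
    }
}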

impl<T: ?Sized> PartialEq for Interned<'_, T> {
    // Address equality: the interner deduplicates, so two handles are
    // equal iff they point at the same allocation.
    fn eq(&self, other: &Self) -> bool {
        ptr::addr_eq(self.0, other.0)
    }
}

impl<T: ?Sized> Eq for Interned<'_, T> {}

impl<T: PartialOrd + ?Sized> PartialOrd for Interned<'_, T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.0.partial_cmp(other.0)
    }
}

impl<T: Ord + ?Sized> Ord for Interned<'_, T> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.0.cmp(other.0)
    }
}

impl<T: Hash + ?Sized> Hash for Interned<'_, T> {
    // Hashes the value rather than the address, so interned handles can be
    // looked up by value (see the `Borrow<T>` impl below). This stays
    // consistent with the address-based `Eq` because the interner maps
    // each value to exactly one allocation.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.0.hash(state)
    }
}

impl<T: ?Sized> Borrow<T> for Interned<'_, T> {
    fn borrow(&self) -> &T {
        self.0
    }
}

impl<T: ?Sized> AsRef<T> for Interned<'_, T> {
    fn as_ref(&self) -> &T {
        self.0
    }
}

impl<T: ?Sized> ops::Deref for Interned<'_, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.0
    }
}

impl<T: ?Sized> Clone for Interned<'_, T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized> Copy for Interned<'_, T> {}

impl<T: fmt::Debug + ?Sized> fmt::Debug for Interned<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.0, f)
    }
}

impl<T: fmt::Display + ?Sized> fmt::Display for Interned<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}

/// A raw, `Copy` pointer form of [`Interned`]. The pointee type defaults
/// to [`Prv`] for type-erased handles.
pub struct RawInterned<T: ?Sized = Prv>(pub(crate) NonNull<T>);

impl<T: ?Sized> RawInterned<T> {
    /// Casts the handle to point at a (sized) `U`.
    #[inline]
    pub fn cast<U>(self) -> RawInterned<U> {
        RawInterned(self.0.cast())
    }

    /// Erases the pointee type.
    #[inline]
    pub fn erase(self) -> RawInterned {
        RawInterned(self.0.cast())
    }
}

impl<T: ?Sized> PartialEq for RawInterned<T> {
    // Compare addresses only, ignoring any pointer metadata.
    fn eq(&self, other: &Self) -> bool {
        ptr::addr_eq(self.0.as_ptr(), other.0.as_ptr())
    }
}

impl<T: ?Sized> Eq for RawInterned<T> {}

impl<T: ?Sized> PartialOrd for RawInterned<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl<T: ?Sized> Ord for RawInterned<T> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.0
            .as_ptr()
            .cast::<()>()
            .cmp(&other.0.as_ptr().cast::<()>())
    }
}

impl<T: ?Sized> Hash for RawInterned<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the address only, so `Hash` agrees with the `addr_eq`-based
        // `Eq` even when `T` is unsized (a fat pointer's metadata would
        // otherwise leak into the hash).
        self.0.as_ptr().cast::<()>().hash(state)
    }
}

impl<T: ?Sized> Borrow<NonNull<T>> for RawInterned<T> {
    fn borrow(&self) -> &NonNull<T> {
        &self.0
    }
}

impl<T: ?Sized> ops::Deref for RawInterned<T> {
    type Target = NonNull<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T: ?Sized> Clone for RawInterned<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T: ?Sized> Copy for RawInterned<T> {}

impl<T: ?Sized> fmt::Debug for RawInterned<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}

/// Marker type: as the second field of [`Interned`] it keeps callers from
/// constructing one directly, and it serves as the default pointee of
/// type-erased [`RawInterned`] handles.
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct Prv;

/// An `Arc`-shared mutex with manual `lock`/`unlock` and a raw-pointer
/// "guard". All safety obligations fall on the caller; see the `unsafe`
/// methods below.
#[derive(Clone)]
pub struct UnsafeLock<T: ?Sized> {
    inner: Arc<ManualMutex<T>>,
}

// SAFETY: `UnsafeLock` is unconditionally `Send` and `Sync`; the thread
// safety obligations this would normally demand of `T` are pushed onto the
// caller through the unsafe constructor and the unsafe `lock`/`unlock`
// pair below.
unsafe impl<T: ?Sized> Send for UnsafeLock<T> {}
unsafe impl<T: ?Sized> Sync for UnsafeLock<T> {}

impl<T> UnsafeLock<T> {
    /// Creates a new lock around `value`.
    ///
    /// # Safety
    ///
    /// The lock is `Send` and `Sync` regardless of `T`, so the caller must
    /// ensure the protected value is only used in ways that would be sound
    /// if `T` itself were `Send` and `Sync`.
    pub unsafe fn new(value: T) -> Self {
        Self {
            inner: Arc::new(ManualMutex {
                mutex: RawMutex::INIT,
                data: UnsafeCell::new(value),
            }),
        }
    }
}

impl<T: ?Sized> UnsafeLock<T> {
    /// Acquires the lock and returns a pointer to the protected value.
    ///
    /// # Safety
    ///
    /// The returned pointer may only be dereferenced while the lock is
    /// held, and every `lock` must be balanced by exactly one
    /// [`unlock`](UnsafeLock::unlock) from the same context.
    pub unsafe fn lock(&self) -> NonNull<T> {
        self.inner.mutex.lock();
        // SAFETY: `UnsafeCell::get` never returns null.
        unsafe { NonNull::new_unchecked(self.inner.data.get()) }
    }

    /// Releases the lock.
    ///
    /// # Safety
    ///
    /// The lock must currently be held by this context, and pointers
    /// obtained from [`lock`](UnsafeLock::lock) must no longer be used.
    pub unsafe fn unlock(&self) {
        unsafe { self.inner.mutex.unlock() };
    }
}

impl<T: fmt::Debug> fmt::Debug for UnsafeLock<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // SAFETY: balanced lock/unlock pair; `t` is not used after
        // `unlock`.
        unsafe {
            let t = self.lock().as_ref();
            let ret = fmt::Debug::fmt(t, f);
            self.unlock();
            ret
        }
    }
}
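
// A minimal, test-only usage sketch (not from the original module): shows
// the balanced lock/unlock discipline described in the `UnsafeLock` docs.
#[cfg(test)]
mod unsafe_lock_tests {
    use super::*;

    #[test]
    fn lock_guards_mutation() {
        // SAFETY: lock/unlock calls are balanced and the pointer is only
        // dereferenced while the lock is held.
        unsafe {
            let lock = UnsafeLock::new(0u64);

            let mut ptr = lock.lock();
            *ptr.as_mut() += 1;
            lock.unlock();

            let ptr = lock.lock();
            assert_eq!(*ptr.as_ref(), 1);
            lock.unlock();
        }
    }
}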

/// The refcounted payload of [`UnsafeLock`]: a raw `parking_lot` mutex
/// next to the data it protects.
struct ManualMutex<T: ?Sized> {
    mutex: RawMutex,
    data: UnsafeCell<T>,
}

// SAFETY: as for `std::sync::Mutex<T>`, the mutex serializes access to the
// data, so both `Send` and `Sync` only require `T: Send`.
unsafe impl<T: Send + ?Sized> Send for ManualMutex<T> {}
unsafe impl<T: Send + ?Sized> Sync for ManualMutex<T> {}

/// Drops `num_elems` elements of type `T` in place, starting at `ptr`.
/// Runs destructors only; it does not free the allocation.
///
/// # Safety
///
/// `ptr` must point to `num_elems` initialized, properly aligned `T`s that
/// are not used again after this call.
pub(crate) unsafe fn cast_then_drop_slice<T>(ptr: *mut u8, num_elems: usize) {
    // SAFETY: guaranteed by the caller contract above.
    unsafe {
        let slice = slice::from_raw_parts_mut(ptr.cast::<T>(), num_elems);
        ptr::drop_in_place(slice);
    }
}
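
// A test-only sketch of the type-erased drop path (the drop-counting type
// and the manual `dealloc` are illustrative, not from the original
// module): erase a boxed slice to `*mut u8`, let `cast_then_drop_slice`
// run the destructors, then free the storage separately.
#[cfg(test)]
mod drop_slice_tests {
    use super::*;
    use std::alloc::{Layout, dealloc};
    use std::sync::atomic::{AtomicUsize, Ordering};

    #[test]
    fn drops_every_element() {
        static DROPS: AtomicUsize = AtomicUsize::new(0);

        struct Counted(#[allow(dead_code)] u8);
        impl Drop for Counted {
            fn drop(&mut self) {
                DROPS.fetch_add(1, Ordering::Relaxed);
            }
        }

        let boxed: Box<[Counted]> =
            vec![Counted(0), Counted(1), Counted(2)].into_boxed_slice();
        let len = boxed.len();
        let ptr = Box::into_raw(boxed) as *mut Counted;

        unsafe {
            // Run the destructors through the erased entry point...
            cast_then_drop_slice::<Counted>(ptr.cast::<u8>(), len);
            // ...then release the still-allocated storage.
            dealloc(ptr.cast::<u8>(), Layout::array::<Counted>(len).unwrap());
        }

        assert_eq!(DROPS.load(Ordering::Relaxed), 3);
    }
}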

/// Test helper: asserts that every handle within a group shares one
/// address and that handles from different groups do not. Each group must
/// be non-empty.
#[cfg(test)]
pub(crate) fn assert_group_addr_eq(groups: &[&[RawInterned]]) {
    for i in 0..groups.len() {
        // All handles within a group must be address-equal...
        for w in groups[i].windows(2) {
            assert_eq!(w[0], w[1]);
        }

        // ...and must differ from every later group's handles.
        let a = groups[i][0];
        for j in i + 1..groups.len() {
            let b = groups[j][0];
            assert_ne!(a, b);
        }
    }
}
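
// Test-only sanity check for the helper above (illustrative): copies of
// one handle compare equal, while handles for two distinct locals differ.
#[cfg(test)]
mod group_addr_tests {
    use super::*;

    #[test]
    fn distinguishes_groups_by_address() {
        let (a, b) = (1u8, 2u8);
        let ra = Interned::unique(&a).erased_raw();
        let rb = Interned::unique(&b).erased_raw();
        assert_group_addr_eq(&[&[ra, ra], &[rb]]);
    }
}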