1use parking_lot::{RawMutex, lock_api::RawMutex as _};
2use std::{
3 borrow::Borrow,
4 cell::UnsafeCell,
5 fmt,
6 hash::{Hash, Hasher},
7 ops,
8 ptr::{self, NonNull},
9 slice,
10 sync::Arc,
11};
12
/// A reference to an interned value. Equality is by address (see the
/// `PartialEq` impl below), which is sound only while equal values are
/// deduplicated to a single allocation. The private `Prv` field keeps
/// construction inside this crate even though the reference itself is `pub`.
pub struct Interned<'a, T: ?Sized>(pub &'a T, Prv);
16
17impl<'a, T: ?Sized> Interned<'a, T> {
18 pub fn raw(&self) -> RawInterned<T> {
19 let ptr = NonNull::from_ref(self.0);
20 RawInterned(ptr)
21 }
22
23 pub fn erased_raw(&self) -> RawInterned {
24 let ptr = NonNull::from_ref(self.0).cast::<Prv>();
25 RawInterned(ptr)
26 }
27
28 pub(crate) fn unique(value: &'a T) -> Self {
30 Self(value, Prv)
31 }
32}
33
34impl<'a, T: ?Sized> Interned<'a, T> {
35 pub unsafe fn from_raw(raw: RawInterned<T>) -> Self {
39 let ref_ = unsafe { raw.0.as_ref() };
40 Self(ref_, Prv)
41 }
42}
43
44impl<'a, T> Interned<'a, T> {
45 pub unsafe fn from_erased_raw(raw: RawInterned) -> Self {
50 let ref_ = unsafe { raw.0.cast::<T>().as_ref() };
51 Self(ref_, Prv)
52 }
53}
54
55impl<T: ?Sized> PartialEq for Interned<'_, T> {
57 fn eq(&self, other: &Self) -> bool {
58 ptr::addr_eq(self.0, other.0)
59 }
60}
61
// Address equality is reflexive, so the `Eq` refinement holds.
impl<T: ?Sized> Eq for Interned<'_, T> {}
64
65impl<T: PartialOrd + ?Sized> PartialOrd for Interned<'_, T> {
66 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
67 self.0.partial_cmp(other.0)
68 }
69}
70
71impl<T: Ord + ?Sized> Ord for Interned<'_, T> {
72 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
73 self.0.cmp(other.0)
74 }
75}
76
77impl<T: Hash + ?Sized> Hash for Interned<'_, T> {
78 fn hash<H: Hasher>(&self, state: &mut H) {
79 self.0.hash(state)
80 }
81}
82
impl<T: ?Sized> Borrow<T> for Interned<'_, T> {
    // Lets `Interned` keys in maps/sets be looked up with a plain `&T`.
    // NOTE(review): the `Borrow` contract needs `Interned`'s address-based
    // `Eq` to agree with `T`'s value-based `Eq`, which holds only if equal
    // values are always interned at a single address — confirm the
    // interner guarantees uniqueness.
    fn borrow(&self) -> &T {
        self.0
    }
}
88
impl<'a, T: ?Sized> ops::Deref for Interned<'a, T> {
    // Note the target is the inner reference `&'a T`, not `T`: `*interned`
    // yields a reference carrying the full interner lifetime `'a`, not one
    // bounded by the borrow of the `Interned` itself.
    type Target = &'a T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
96
97impl<T: ?Sized> Clone for Interned<'_, T> {
98 fn clone(&self) -> Self {
99 *self
100 }
101}
102
// `Interned` is just a reference plus a zero-sized marker, so it is
// freely copyable.
impl<T: ?Sized> Copy for Interned<'_, T> {}
104
105impl<T: fmt::Debug + ?Sized> fmt::Debug for Interned<'_, T> {
106 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
107 fmt::Debug::fmt(&self.0, f)
108 }
109}
110
111impl<T: fmt::Display + ?Sized> fmt::Display for Interned<'_, T> {
112 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
113 fmt::Display::fmt(&self.0, f)
114 }
115}
116
/// Raw (pointer) form of [`Interned`]: a bare non-null pointer with no
/// lifetime. The pointee defaults to the private `Prv` marker, which is
/// what the type-erased `Interned::erased_raw` produces.
pub struct RawInterned<T: ?Sized = Prv>(pub(crate) NonNull<T>);
119
120impl<T: ?Sized> RawInterned<T> {
121 #[inline]
122 pub fn cast<U>(self) -> RawInterned<U> {
123 RawInterned(self.0.cast())
124 }
125}
126
127impl<T: ?Sized> PartialEq for RawInterned<T> {
129 fn eq(&self, other: &Self) -> bool {
130 ptr::addr_eq(self.0.as_ptr(), other.0.as_ptr())
131 }
132}
133
// Address comparison is reflexive, so the `Eq` refinement holds.
impl<T: ?Sized> Eq for RawInterned<T> {}
136
137impl<T: ?Sized> PartialOrd for RawInterned<T> {
139 fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
140 Some(self.cmp(other))
141 }
142}
143
144impl<T: ?Sized> Ord for RawInterned<T> {
146 fn cmp(&self, other: &Self) -> std::cmp::Ordering {
147 self.0
148 .as_ptr()
149 .cast::<()>()
150 .cmp(&other.0.as_ptr().cast::<()>())
151 }
152}
153
154impl<T: ?Sized> Hash for RawInterned<T> {
155 fn hash<H: Hasher>(&self, state: &mut H) {
156 self.0.hash(state)
157 }
158}
159
impl<T: ?Sized> Borrow<NonNull<T>> for RawInterned<T> {
    // Allows keyed lookup by the underlying `NonNull` pointer.
    // NOTE(review): `Borrow` expects `RawInterned` and `NonNull<T>` to
    // agree on `Eq`/`Hash`/`Ord`; `RawInterned` compares addresses only,
    // while `NonNull` comparisons for wide pointers also involve
    // metadata — verify callers only rely on this for thin (or
    // metadata-identical) pointers.
    fn borrow(&self) -> &NonNull<T> {
        &self.0
    }
}
165
impl<T: ?Sized> ops::Deref for RawInterned<T> {
    // Exposes `NonNull`'s API (`as_ptr`, `cast`, ...) directly on the
    // wrapper.
    type Target = NonNull<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
173
174impl<T: ?Sized> Clone for RawInterned<T> {
175 fn clone(&self) -> Self {
176 *self
177 }
178}
179
// A `NonNull` wrapper is plain-old-data; copying it is free.
impl<T: ?Sized> Copy for RawInterned<T> {}
181
182impl<T: ?Sized> fmt::Debug for RawInterned<T> {
183 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
184 self.0.fmt(f)
185 }
186}
187
/// Private zero-sized marker: seals `Interned` construction to this crate
/// and serves as the default (type-erased) pointee of [`RawInterned`].
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]
pub struct Prv;
190
/// A manually locked/unlocked mutex handle; cloning shares the same
/// underlying lock and data via `Arc`. All access goes through the
/// `unsafe` `lock`/`unlock` pair (no guard type), hence the name.
#[derive(Clone)]
pub struct UnsafeLock<T: ?Sized> {
    inner: Arc<ManualMutex<T>>,
}
195
// SAFETY: deliberately unconditional — there is no `T: Send`/`T: Sync`
// bound, so soundness rests entirely on the contract of the `unsafe`
// constructor and `lock`/`unlock` accessors: presumably the caller must
// guarantee that the wrapped `T` may be moved to and accessed from other
// threads. NOTE(review): the original doc comment was stripped here —
// confirm the intended contract before relying on these impls.
unsafe impl<T: ?Sized> Send for UnsafeLock<T> {}
unsafe impl<T: ?Sized> Sync for UnsafeLock<T> {}
216
217impl<T> UnsafeLock<T> {
218 pub unsafe fn new(value: T) -> Self {
224 Self {
225 inner: Arc::new(ManualMutex {
226 mutex: RawMutex::INIT,
227 data: UnsafeCell::new(value),
228 }),
229 }
230 }
231}
232
impl<T: ?Sized> UnsafeLock<T> {
    /// Acquires the underlying mutex (blocking) and returns a pointer to
    /// the guarded data.
    ///
    /// # Safety
    /// Each call must be balanced by exactly one later [`Self::unlock`],
    /// and the returned pointer must not be dereferenced after that
    /// unlock. The raw mutex is not reentrant: re-locking from the same
    /// thread before unlocking will deadlock.
    pub unsafe fn lock(&self) -> NonNull<T> {
        self.inner.mutex.lock();
        // `UnsafeCell::get` returns a pointer into the live `Arc`
        // allocation, so it is never null.
        unsafe { NonNull::new_unchecked(self.inner.data.get()) }
    }

    /// Releases the mutex.
    ///
    /// # Safety
    /// The mutex must currently be held via a matching [`Self::lock`]
    /// (this is `RawMutex::unlock`'s own contract).
    pub unsafe fn unlock(&self) {
        unsafe { self.inner.mutex.unlock() };
    }
}
252
impl<T: fmt::Debug> fmt::Debug for UnsafeLock<T> {
    // Formats the guarded value while briefly holding the lock.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        unsafe {
            // SAFETY: lock/unlock are correctly paired, and the pointer is
            // only dereferenced between them.
            // NOTE(review): if `T`'s `Debug` impl panics, `unlock` is never
            // reached and the mutex stays locked; a `T` whose `Debug`
            // touches this same lock would deadlock. Confirm these cases
            // cannot occur for the types stored here.
            let t = self.lock().as_ref();
            let ret = fmt::Debug::fmt(t, f);
            self.unlock();
            ret
        }
    }
}
264
/// Raw building block of [`UnsafeLock`]: a `parking_lot` raw mutex stored
/// next to the data it guards. Locking discipline is enforced by callers,
/// not by a guard type.
struct ManualMutex<T: ?Sized> {
    mutex: RawMutex,
    data: UnsafeCell<T>,
}
269
// SAFETY: mirrors `std::sync::Mutex` — a mutex may be `Sync` when
// `T: Send` because the lock serializes all access to `data`.
unsafe impl<T: Send + ?Sized> Send for ManualMutex<T> {}
unsafe impl<T: Send + ?Sized> Sync for ManualMutex<T> {}
272
/// Casts `ptr` back to `*mut T` and runs the destructor of each of the
/// `num_elems` elements in place; the backing memory itself is NOT freed.
///
/// # Safety
/// `ptr` must be non-null, properly aligned for `T`, and point to
/// `num_elems` initialized, not-yet-dropped `T`s with no live references
/// or aliases elsewhere.
pub(crate) unsafe fn cast_then_drop_slice<T>(ptr: *mut u8, num_elems: usize) {
    let typed: *mut T = ptr.cast();
    // SAFETY: the caller upholds the alignment/validity/uniqueness
    // requirements above, which is exactly what both calls need.
    unsafe {
        let elems = slice::from_raw_parts_mut(typed, num_elems);
        ptr::drop_in_place(elems);
    }
}
279
/// Test helper: asserts that all pointers within each group share one
/// address, and that distinct groups have distinct addresses.
///
/// Fixes a latent panic in the original: indexing `groups[i][0]` blew up
/// with an out-of-bounds panic (not an assertion failure) whenever a
/// group was empty. Empty groups are now simply skipped.
#[cfg(test)]
pub(crate) fn assert_group_addr_eq(groups: &[&[RawInterned]]) {
    for (i, group) in groups.iter().enumerate() {
        // Every member of one group must share the same address.
        for pair in group.windows(2) {
            assert_eq!(pair[0], pair[1]);
        }

        // Since intra-group equality is already asserted, comparing one
        // representative per group suffices for cross-group inequality.
        let Some(&a) = group.first() else { continue };
        for later in &groups[i + 1..] {
            if let Some(&b) = later.first() {
                assert_ne!(a, b);
            }
        }
    }
}