use crate::{
    loom::*,
    ref_count::{Interned, Interner},
};
use std::{
    borrow::Borrow,
    cmp::Ordering,
    collections::BTreeSet,
    fmt::{Debug, Display, Formatter, Pointer},
    hash::Hasher,
    ops::Deref,
    sync::Arc,
};

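/// An interner that stores its values in a [`BTreeSet`], deduplicating them
/// through their [`Ord`](std::cmp::Ord) ordering.
///
/// Interning yields an [`InternedOrd`] handle; all equal values share one
/// allocation for as long as at least one handle is alive. Cloning the
/// interner is cheap and produces another handle to the same set.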
pub struct OrdInterner<T: ?Sized + std::cmp::Ord> {
    inner: Arc<Ord<T>>,
}

impl<T: ?Sized + std::cmp::Ord> Clone for OrdInterner<T> {
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}

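/// The shared state behind an [`OrdInterner`]: a locked [`BTreeSet`] of all
/// currently interned values.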
#[repr(C)]
pub struct Ord<T: ?Sized + std::cmp::Ord> {
    set: RwLock<BTreeSet<InternedOrd<T>>>,
}

#[cfg(loom)]
impl<T: ?Sized + std::cmp::Ord> Drop for Ord<T> {
    fn drop(&mut self) {
        // Take and release the lock once more so the loom model observes a
        // final synchronization point with concurrently dropped handles.
        let _ = self.set.read();
    }
}

unsafe impl<T: ?Sized + std::cmp::Ord + Sync + Send> Send for Ord<T> {}
unsafe impl<T: ?Sized + std::cmp::Ord + Sync + Send> Sync for Ord<T> {}

impl<T: ?Sized + std::cmp::Ord> Interner for Ord<T> {
    type T = T;

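    /// Removes `value` from the set once the set itself holds the only
    /// remaining reference, handing that last handle back to the caller for
    /// the final drop. If other handles still exist (e.g. a concurrent
    /// `intern` revived the entry), the value is reinserted and kept.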
    fn remove(&self, value: &Interned<Self>) -> (bool, Option<Interned<Self>>) {
        let value = cast(value);
        let mut set = self.set.write();
        #[cfg(loom)]
        let mut set = set.unwrap();
        if let Some(i) = set.take(value) {
            if i.ref_count() == 1 {
                (true, Some(i.0))
            } else {
                set.insert(i);
                (false, None)
            }
        } else {
            (true, None)
        }
    }
}

impl<T: ?Sized + std::cmp::Ord> OrdInterner<T> {
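    /// Creates an empty interner.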
    pub fn new() -> Self {
        Self {
            inner: Arc::new(Ord {
                set: RwLock::new(BTreeSet::new()),
            }),
        }
    }

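    /// Returns the number of values currently interned.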
    pub fn len(&self) -> usize {
        let set = self.inner.set.read();
        #[cfg(loom)]
        let set = set.unwrap();
        set.len()
    }

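    /// Returns `true` if no values are currently interned.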
    pub fn is_empty(&self) -> bool {
        let set = self.inner.set.read();
        #[cfg(loom)]
        let set = set.unwrap();
        set.is_empty()
    }

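    /// Common interning path: look the value up under a read lock first and
    /// only take the write lock when it actually has to be inserted.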
    fn intern<U, F>(&self, value: U, intern: F) -> InternedOrd<T>
    where
        F: FnOnce(U) -> InternedOrd<T>,
        U: Borrow<T>,
    {
        #[cfg(not(loom))]
        let set = self.inner.set.upgradable_read();
        #[cfg(loom)]
        let set = self.inner.set.read().unwrap();
        if let Some(entry) = set.get(value.borrow()) {
            return entry.clone();
        }
        #[cfg(not(loom))]
        let mut set = RwLockUpgradableReadGuard::upgrade(set);
        #[cfg(loom)]
        let mut set = {
            drop(set);
            self.inner.set.write().unwrap()
        };
        // Check again: under loom the lock is released before it is
        // reacquired for writing, so another thread may have interned the
        // value in the meantime.
        if let Some(entry) = set.get(value.borrow()) {
            return entry.clone();
        }
        let mut ret = intern(value);
        ret.0.make_hot(&self.inner);
        set.insert(ret.clone());
        ret
    }

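    /// Interns a value given by reference, cloning it via [`ToOwned`] only
    /// when it is not already present.
    ///
    /// A sketch of the intended usage, assuming the interner is in scope:
    ///
    /// ```ignore
    /// let strings = OrdInterner::<str>::new();
    /// let a = strings.intern_ref("hello");
    /// let b = strings.intern_ref("hello");
    /// // Both handles point at the same allocation.
    /// assert!(std::ptr::eq(a.as_ref(), b.as_ref()));
    /// ```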
    pub fn intern_ref(&self, value: &T) -> InternedOrd<T>
    where
        T: ToOwned,
        T::Owned: Into<Box<T>>,
    {
        self.intern(value, |v| {
            InternedOrd(Interned::from_box(v.to_owned().into()))
        })
    }

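    /// Interns a boxed value. If an equal value is already interned, the box
    /// is dropped and a handle to the existing value is returned.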
    pub fn intern_box(&self, value: Box<T>) -> InternedOrd<T> {
        self.intern(value, |v| InternedOrd(Interned::from_box(v)))
    }

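    /// Interns a sized value by moving it into the interner.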
    pub fn intern_sized(&self, value: T) -> InternedOrd<T>
    where
        T: Sized,
    {
        self.intern(value, |v| InternedOrd(Interned::from_sized(v)))
    }
}

impl<T: ?Sized + std::cmp::Ord> Default for OrdInterner<T> {
    fn default() -> Self {
        Self::new()
    }
}

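/// A handle to a value stored in an [`OrdInterner`].
///
/// It dereferences to `T`, is cheap to clone, and its comparison traits
/// short-circuit on pointer identity before comparing the values themselves.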
#[repr(transparent)]
pub struct InternedOrd<T: ?Sized + std::cmp::Ord>(Interned<Ord<T>>);

impl<T: ?Sized + std::cmp::Ord> InternedOrd<T> {
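    /// Returns how many handles, including the interner's own copy,
    /// currently point at this value.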
    pub fn ref_count(&self) -> u32 {
        self.0.ref_count()
    }
}

impl<T: ?Sized + std::cmp::Ord> Clone for InternedOrd<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}

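// `InternedOrd<T>` is a `#[repr(transparent)]` wrapper around
// `Interned<Ord<T>>`, so the two types share a layout and the pointer cast
// below is sound.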
fn cast<T: ?Sized + std::cmp::Ord>(i: &Interned<Ord<T>>) -> &InternedOrd<T> {
    unsafe { &*(i as *const _ as *const InternedOrd<T>) }
}

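// The comparison impls below short-circuit on handle identity: two handles
// sharing one allocation are equal without inspecting the value itself.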
impl<T: ?Sized + std::cmp::Ord> PartialEq for InternedOrd<T>
where
    T: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0 || self.deref().eq(other.deref())
    }
}
impl<T: ?Sized + std::cmp::Ord> Eq for InternedOrd<T> where T: Eq {}

impl<T: ?Sized + std::cmp::Ord> PartialOrd for InternedOrd<T>
where
    T: PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        if self.0 == other.0 {
            return Some(Ordering::Equal);
        }
        self.deref().partial_cmp(other.deref())
    }
}
impl<T: ?Sized + std::cmp::Ord> std::cmp::Ord for InternedOrd<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        if self.0 == other.0 {
            return Ordering::Equal;
        }
        self.deref().cmp(other.deref())
    }
}

impl<T: ?Sized + std::cmp::Ord> std::hash::Hash for InternedOrd<T>
where
    T: std::hash::Hash,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.deref().hash(state)
    }
}

impl<T: ?Sized + std::cmp::Ord> Borrow<T> for InternedOrd<T> {
    fn borrow(&self) -> &T {
        self.deref()
    }
}

impl<T: ?Sized + std::cmp::Ord> Deref for InternedOrd<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}

impl<T: ?Sized + std::cmp::Ord> AsRef<T> for InternedOrd<T> {
    fn as_ref(&self) -> &T {
        self.deref()
    }
}

impl<T: ?Sized + std::cmp::Ord> Debug for InternedOrd<T>
where
    T: Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "Interned({:?})", &**self)
    }
}

impl<T: ?Sized + std::cmp::Ord> Display for InternedOrd<T>
where
    T: Display,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.deref().fmt(f)
    }
}

impl<T: ?Sized + std::cmp::Ord> Pointer for InternedOrd<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        Pointer::fmt(&(&**self as *const T), f)
    }
}

#[test]
fn size() {
    let s = std::mem::size_of::<Ord<()>>();
    assert!(s < 100, "too big: {}", s);
}

#[test]
fn debug() {
    let interner = OrdInterner::new();
    let i = interner.intern_ref("value");
    assert_eq!(format!("{i:?}"), r#"Interned("value")"#);
}

#[cfg(all(test, loom))]
mod tests {
    use super::*;
    use ::loom::{model, thread::{current, spawn}};
    use std::sync::Weak;

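    // Reads the strong and weak counters out of the `Arc` allocation this
    // `Weak` points at. This peeks at the private `ArcInner` layout (two
    // counter words at the start of the allocation), which is an std
    // implementation detail and only acceptable in tests.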
    fn counts<T>(weak: Weak<T>) -> (usize, usize) {
        unsafe {
            let ptr = &weak as *const _ as *const *const (usize, usize);
            **ptr
        }
    }

    #[test]
    fn drop_interner() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());

            let h = spawn(move || drop(i));
            let h2 = spawn(move || drop(n));

            h.join().unwrap();
            h2.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
        })
    }

    #[test]
    fn drop_two_external() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());
            let n2 = n.clone();
            drop(i);

            let h = spawn(move || drop(n));
            let h2 = spawn(move || drop(n2));

            h.join().unwrap();
            h2.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
        })
    }

    #[test]
    fn drop_against_intern() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());

            let h1 = spawn(move || drop(n));
            let h2 = spawn(move || i.intern_box(42.into()));

            h1.join().unwrap();
            h2.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
        })
    }

    #[test]
    fn tree_drop_against_intern_and_interner() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());
            let ii = i.clone();

            println!("{:?} setup", current().id());
            let h1 = spawn(move || drop(n));
            let h2 = spawn(move || i.intern_box(42.into()));
            let h3 = spawn(move || drop(ii));

            println!("{:?} joining", current().id());
            h1.join().unwrap();
            h2.join().unwrap();
            h3.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
            println!("{:?} done", current().id());
        })
    }
}