use crate::{
    loom::*,
    ref_count::{Interned, Interner},
};
use std::{
    borrow::Borrow,
    cmp::Ordering,
    collections::BTreeSet,
    fmt::{Debug, Display, Formatter, Pointer},
    hash::Hasher,
    ops::Deref,
    sync::Arc,
};

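/// An interner that deduplicates values by their [`std::cmp::Ord`] comparison,
/// storing them in a `BTreeSet` behind an `RwLock`.
///
/// Cloning an `OrdInterner` is cheap: clones share the same underlying set via
/// an `Arc`.
///
/// A minimal usage sketch (not compiled as a doctest):
///
/// ```ignore
/// let strings = OrdInterner::<str>::new();
/// let a = strings.intern_ref("hello");
/// let b = strings.intern_box("hello".into());
/// assert_eq!(a, b);             // both handles refer to the same entry
/// assert_eq!(strings.len(), 1); // only one copy is stored
/// ```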
pub struct OrdInterner<T: ?Sized + std::cmp::Ord> {
    inner: Arc<Ord<T>>,
}

impl<T: ?Sized + std::cmp::Ord> Clone for OrdInterner<T> {
    fn clone(&self) -> Self {
        Self {
            inner: self.inner.clone(),
        }
    }
}

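/// Shared state of an [`OrdInterner`]: the set of currently interned values,
/// guarded by an `RwLock`.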
#[repr(C)]
pub struct Ord<T: ?Sized + std::cmp::Ord> {
    set: RwLock<BTreeSet<InternedOrd<T>>>,
}

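// loom-only: acquire the lock one last time while dropping the interner state,
// presumably so the loom model sees a final synchronisation point on the
// RwLock before it is destroyed.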
#[cfg(loom)]
impl<T: ?Sized + std::cmp::Ord> Drop for Ord<T> {
    fn drop(&mut self) {
        let _ = self.set.read();
    }
}

unsafe impl<T: ?Sized + std::cmp::Ord + Sync + Send> Send for Ord<T> {}
unsafe impl<T: ?Sized + std::cmp::Ord + Sync + Send> Sync for Ord<T> {}

impl<T: ?Sized + std::cmp::Ord> Interner for Ord<T> {
    type T = T;

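    // Called (presumably by `Interned` when a handle is dropped) to retire a
    // value: under the write lock the entry is taken out of the set; if its
    // reference count is 1 the set held the last handle and it is returned
    // for deallocation, otherwise the entry is re-inserted and nothing is
    // freed. A value that is no longer in the set yields `(true, None)`.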
    fn remove(&self, value: &Interned<Self>) -> (bool, Option<Interned<Self>>) {
        let value = cast(value);
        let mut set = self.set.write();
        #[cfg(loom)]
        let mut set = set.unwrap();
        if let Some(i) = set.take(value) {
            if i.ref_count() == 1 {
                (true, Some(i.0))
            } else {
                set.insert(i);
                (false, None)
            }
        } else {
            (true, None)
        }
    }
}

impl<T: ?Sized + std::cmp::Ord> OrdInterner<T> {
    pub fn new() -> Self {
        Self {
            inner: Arc::new(Ord {
                set: RwLock::new(BTreeSet::new()),
            }),
        }
    }

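    /// Returns the number of values currently held by this interner.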
    pub fn len(&self) -> usize {
        let set = self.inner.set.read();
        #[cfg(loom)]
        let set = set.unwrap();
        set.len()
    }

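    /// Returns `true` if the interner currently holds no values.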
    pub fn is_empty(&self) -> bool {
        let set = self.inner.set.read();
        #[cfg(loom)]
        let set = set.unwrap();
        set.is_empty()
    }

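    // Shared interning path: first look the value up under a read lock (an
    // upgradable read outside loom, a plain read under loom); only on a miss
    // is the lock upgraded (or re-acquired for writing under loom), the
    // lookup repeated, and a fresh entry built via `intern` and inserted.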
    fn intern<U, F>(&self, value: U, intern: F) -> InternedOrd<T>
    where
        F: FnOnce(U) -> InternedOrd<T>,
        U: Borrow<T>,
    {
        #[cfg(not(loom))]
        let set = self.inner.set.upgradable_read();
        #[cfg(loom)]
        let set = self.inner.set.read().unwrap();
        if let Some(entry) = set.get(value.borrow()) {
            return entry.clone();
        }
        #[cfg(not(loom))]
        let mut set = RwLockUpgradableReadGuard::upgrade(set);
        #[cfg(loom)]
        let mut set = {
            drop(set);
            self.inner.set.write().unwrap()
        };
        if let Some(entry) = set.get(value.borrow()) {
            return entry.clone();
        }
        let mut ret = intern(value);
        ret.0.make_hot(&self.inner);
        set.insert(ret.clone());
        ret
    }

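    /// Interns a value given by reference, copying it into owned storage only
    /// when it is not already present.
    ///
    /// A usage sketch (not compiled as a doctest):
    ///
    /// ```ignore
    /// let interner = OrdInterner::<str>::new();
    /// let a = interner.intern_ref("hello");
    /// let b = interner.intern_ref("hello");
    /// assert_eq!(interner.len(), 1);
    /// assert_eq!(a, b);
    /// ```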
    pub fn intern_ref(&self, value: &T) -> InternedOrd<T>
    where
        T: ToOwned,
        T::Owned: Into<Box<T>>,
    {
        self.intern(value, |v| {
            InternedOrd(Interned::from_box(v.to_owned().into()))
        })
    }

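    /// Interns an already boxed value. The box is only kept if the value was
    /// not interned before; otherwise the existing handle is returned and the
    /// box is dropped.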
    pub fn intern_box(&self, value: Box<T>) -> InternedOrd<T> {
        self.intern(value, |v| InternedOrd(Interned::from_box(v)))
    }

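    /// Interns a sized value by moving it into the interner (or dropping it if
    /// an equal value is already interned).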
    pub fn intern_sized(&self, value: T) -> InternedOrd<T>
    where
        T: Sized,
    {
        self.intern(value, |v| InternedOrd(Interned::from_sized(v)))
    }
}

impl<T: ?Sized + std::cmp::Ord> Default for OrdInterner<T> {
    fn default() -> Self {
        Self::new()
    }
}

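/// A handle to a value stored in an [`OrdInterner`].
///
/// It dereferences to `T`; equality and ordering take a shortcut when two
/// handles refer to the same interned entry and otherwise compare the
/// underlying values.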
#[repr(transparent)]
pub struct InternedOrd<T: ?Sized + std::cmp::Ord>(Interned<Ord<T>>);

impl<T: ?Sized + std::cmp::Ord> InternedOrd<T> {
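    /// Returns the current reference count of this value, including the handle
    /// the interner itself keeps in its set.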
    pub fn ref_count(&self) -> u32 {
        self.0.ref_count()
    }
}

impl<T: ?Sized + std::cmp::Ord> Clone for InternedOrd<T> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}

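// Reinterprets a reference to the inner `Interned` as a reference to the
// `InternedOrd` newtype; sound because `InternedOrd` is `#[repr(transparent)]`
// over `Interned<Ord<T>>` and therefore has the same layout.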
fn cast<T: ?Sized + std::cmp::Ord>(i: &Interned<Ord<T>>) -> &InternedOrd<T> {
    unsafe { &*(i as *const _ as *const InternedOrd<T>) }
}

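// The comparison impls below first check the `Interned` handles themselves as
// a cheap shortcut (presumably pointer identity within one interner) and only
// fall back to comparing the underlying values when that check fails.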
impl<T: ?Sized + std::cmp::Ord> PartialEq for InternedOrd<T>
where
    T: PartialEq,
{
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0 || self.deref().eq(other.deref())
    }
}
impl<T: ?Sized + std::cmp::Ord> Eq for InternedOrd<T> where T: Eq {}

impl<T: ?Sized + std::cmp::Ord> PartialOrd for InternedOrd<T>
where
    T: PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        if self.0 == other.0 {
            return Some(Ordering::Equal);
        }
        self.deref().partial_cmp(other.deref())
    }
}
impl<T: ?Sized + std::cmp::Ord> std::cmp::Ord for InternedOrd<T> {
    fn cmp(&self, other: &Self) -> Ordering {
        if self.0 == other.0 {
            return Ordering::Equal;
        }
        self.deref().cmp(other.deref())
    }
}

impl<T: ?Sized + std::cmp::Ord> std::hash::Hash for InternedOrd<T>
where
    T: std::hash::Hash,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.deref().hash(state)
    }
}

impl<T: ?Sized + std::cmp::Ord> Borrow<T> for InternedOrd<T> {
    fn borrow(&self) -> &T {
        self.deref()
    }
}

impl<T: ?Sized + std::cmp::Ord> Deref for InternedOrd<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.0.deref()
    }
}

impl<T: ?Sized + std::cmp::Ord> AsRef<T> for InternedOrd<T> {
    fn as_ref(&self) -> &T {
        self.deref()
    }
}

impl<T: ?Sized + std::cmp::Ord> Debug for InternedOrd<T>
where
    T: Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "Interned({:?})", &**self)
    }
}

impl<T: ?Sized + std::cmp::Ord> Display for InternedOrd<T>
where
    T: Display,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.deref().fmt(f)
    }
}

impl<T: ?Sized + std::cmp::Ord> Pointer for InternedOrd<T> {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        Pointer::fmt(&(&**self as *const T), f)
    }
}

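// With the `serde` feature enabled, serialization writes the plain underlying
// value, while deserialization re-interns it through the crate's global
// `ord_interner`, so repeated values deserialize into shared handles.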
#[cfg(feature = "serde")]
impl<T: ?Sized + std::cmp::Ord + serde::Serialize> serde::Serialize for InternedOrd<T> {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        (**self).serialize(serializer)
    }
}

#[cfg(feature = "serde")]
impl<'de, T> serde::Deserialize<'de> for InternedOrd<T>
where
    T: std::cmp::Ord + serde::Deserialize<'de> + Send + Sync + 'static,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = T::deserialize(deserializer)?;
        Ok(crate::global::ord_interner().intern_sized(value))
    }
}

#[cfg(feature = "serde")]
impl<'de, T> serde::Deserialize<'de> for InternedOrd<[T]>
where
    T: std::cmp::Ord + serde::Deserialize<'de> + Send + Sync + 'static,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = Vec::<T>::deserialize(deserializer)?;
        Ok(crate::global::ord_interner().intern_box(value.into()))
    }
}

#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for InternedOrd<str> {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let value = String::deserialize(deserializer)?;
        Ok(crate::global::ord_interner().intern_box(value.into()))
    }
}

#[test]
fn size() {
    let s = std::mem::size_of::<Ord<()>>();
    assert!(s < 100, "too big: {}", s);
}

#[test]
fn debug() {
    let interner = OrdInterner::new();
    let i = interner.intern_ref("value");
    assert_eq!(format!("{i:?}"), r#"Interned("value")"#);
}

#[cfg(all(test, loom))]
mod tests {
    use super::*;
    use ::loom::{model, thread::{current, spawn}};

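    // Extracts the (strong, weak) reference counts straight out of the Arc
    // allocation that `weak` points to. This assumes the Weak is a thin
    // pointer whose target starts with the two counter words; it is only used
    // to assert final counts at the end of each loom model.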
    fn counts<T>(weak: Weak<T>) -> (usize, usize) {
        unsafe {
            let ptr = &weak as *const _ as *const *const (usize, usize);
            **ptr
        }
    }

    #[test]
    fn drop_interner() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());

            let h = spawn(move || drop(i));
            let h2 = spawn(move || drop(n));

            h.join().unwrap();
            h2.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
        })
    }

    #[test]
    fn drop_two_external() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());
            let n2 = n.clone();
            drop(i);

            let h = spawn(move || drop(n));
            let h2 = spawn(move || drop(n2));

            h.join().unwrap();
            h2.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
        })
    }

    #[test]
    fn drop_against_intern() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());

            let h1 = spawn(move || drop(n));
            let h2 = spawn(move || i.intern_box(42.into()));

            h1.join().unwrap();
            h2.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
        })
    }

    #[test]
    fn tree_drop_against_intern_and_interner() {
        model(|| {
            let i = OrdInterner::new();
            let i2 = Arc::downgrade(&i.inner);

            let n = i.intern_box(42.into());
            let ii = i.clone();

            println!("{:?} setup", current().id());
            let h1 = spawn(move || drop(n));
            let h2 = spawn(move || i.intern_box(42.into()));
            let h3 = spawn(move || drop(ii));

            println!("{:?} joining", current().id());
            h1.join().unwrap();
            h2.join().unwrap();
            h3.join().unwrap();

            assert_eq!(counts(i2), (0, 1));
            println!("{:?} done", current().id());
        })
    }
}