tark/
lib.rs

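//! `Tark`: reference-counted pointers with a two-level count.
//!
//! `TarkSend<T>` is an atomically counted handle that may cross threads,
//! while `Tark<T>` layers a cheap non-atomic local count on top of a single
//! shared atomic reference. Converting between the two (`TarkSend::promote`,
//! `Tark::sendable`) moves or adds atomic references, so the atomic count
//! only tracks cross-thread handles and local groups, not every clone.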
use std::fmt::{Debug, Display, Formatter, Result as FmtResult, Pointer};
use std::hash::{Hash, Hasher};
use std::ptr::NonNull;
use std::ops::Deref;
use std::borrow::Borrow;
use std::cell::Cell;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::num::NonZeroUsize;

struct TarkInner<T: ?Sized> {
    strong: AtomicUsize,
    data: T,
}

impl<T: ?Sized> TarkInner<T> {
    fn dec_maybe_drop(inner: NonNull<TarkInner<T>>) {
        // SAFE: `inner` is assumed valid.
        if unsafe { inner.as_ref() }.strong.fetch_sub(1, Ordering::AcqRel) == 1 {
            // SAFE: `inner` was allocated as a Box, and thus can be dropped
            // as one. The count just hit zero, so no other handle can still
            // reach it.
            unsafe { dealloc_box(inner); }
        }
    }

    fn inc_nonnull(inner: NonNull<TarkInner<T>>) {
        // SAFE: `inner` is assumed valid. Relaxed suffices for an increment:
        // the new reference is derived from an existing one, so the count
        // cannot concurrently reach zero (this mirrors `Arc::clone`).
        unsafe { inner.as_ref() }.strong.fetch_add(1, Ordering::Relaxed);
    }
}

impl<T> TarkInner<T> {
    const fn new(data: T) -> Self {
        TarkInner {
            strong: AtomicUsize::new(1),
            data,
        }
    }
}

impl<T: ?Sized + Hash> Hash for TarkInner<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.data.hash(state)
    }
}

impl<T: ?Sized + PartialEq> PartialEq for TarkInner<T> {
    fn eq(&self, other: &Self) -> bool {
        self.data.eq(&other.data)
    }

    fn ne(&self, other: &Self) -> bool {
        self.data.ne(&other.data)
    }
}

impl<T: ?Sized + Eq> Eq for TarkInner<T> {}

impl<T: ?Sized + PartialOrd> PartialOrd for TarkInner<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.data.partial_cmp(&other.data)
    }

    fn lt(&self, other: &Self) -> bool {
        self.data.lt(&other.data)
    }

    fn le(&self, other: &Self) -> bool {
        self.data.le(&other.data)
    }

    fn gt(&self, other: &Self) -> bool {
        self.data.gt(&other.data)
    }

    fn ge(&self, other: &Self) -> bool {
        self.data.ge(&other.data)
    }
}

impl<T: ?Sized + Ord> Ord for TarkInner<T> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.data.cmp(&other.data)
    }
}

impl<T: ?Sized + Debug> Debug for TarkInner<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        T::fmt(&self.data, f)
    }
}

impl<T: ?Sized + Display> Display for TarkInner<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        T::fmt(&self.data, f)
    }
}

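/// An atomically reference-counted shared pointer, analogous to `Arc`, that
/// can be sent and shared across threads.
///
/// A minimal usage sketch (illustrative; assumes the crate is named `tark`):
///
/// ```
/// use tark::TarkSend;
///
/// let a = TarkSend::new(41);
/// let b = a.clone();
/// assert!(TarkSend::ptr_eq(&a, &b));
/// assert_eq!(TarkSend::atomic_count(&a).get(), 2);
/// ```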
pub struct TarkSend<T: ?Sized + Send + Sync> {
    inner: NonNull<TarkInner<T>>,
}

impl<T: ?Sized + Send + Sync> TarkSend<T> {
    pub fn new(t: T) -> Self
    where
        T: Sized,
    {
        // The fresh `TarkInner` already starts with a strong count of 1,
        // which this handle takes ownership of; going through `from_raw`
        // here would increment it a second time and leak the allocation.
        TarkSend {
            inner: alloc_nonnull(TarkInner::new(t)),
        }
    }

    pub fn atomic_count(this: &Self) -> NonZeroUsize {
        // SAFE: The atomic refcount is guaranteed non-zero while `this` is
        // alive, because `this` itself holds one reference.
        unsafe { NonZeroUsize::new_unchecked(
            this.inner.as_ref().strong.load(Ordering::Acquire),
        ) }
    }

    /// Builds a new handle from a raw inner pointer, taking a fresh atomic
    /// reference on behalf of the returned handle.
    fn from_raw(inner: NonNull<TarkInner<T>>) -> Self {
        TarkInner::inc_nonnull(inner);
        TarkSend { inner }
    }

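    /// Converts this handle into a locally counted [`Tark`], transferring
    /// its atomic reference to the new local group.
    ///
    /// A sketch of the intended use (illustrative; assumes the crate is
    /// named `tark`):
    ///
    /// ```
    /// use tark::{Tark, TarkSend};
    ///
    /// let s = TarkSend::new(String::from("hi"));
    /// let local: Tark<String> = TarkSend::promote(s);
    /// // The one atomic reference moved from `s` to `local`'s group.
    /// assert_eq!(Tark::atomic_count(&local).get(), 1);
    /// ```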
    pub fn promote(this: Self) -> Tark<T> {
        let t = Tark {
            inner: this.inner,
            strong_weak: StrongWeak::alloc(),
        };
        // Forget `this` so its atomic reference is transferred to the new
        // local group instead of being released.
        std::mem::forget(this);
        t
    }

    /// Like [`TarkSend::promote`], but keeps `this` alive by taking an
    /// additional atomic reference for the new local group.
    pub fn promote_ref(this: &Self) -> Tark<T> {
        TarkInner::inc_nonnull(this.inner);
        Tark {
            inner: this.inner,
            strong_weak: StrongWeak::alloc(),
        }
    }

    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.inner.eq(&other.inner)
    }
}

impl<T: ?Sized + Send + Sync + Hash> Hash for TarkSend<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_ref().hash(state)
    }
}

impl<T: ?Sized + Send + Sync + PartialEq> PartialEq for TarkSend<T> {
    fn eq(&self, other: &Self) -> bool {
        self.as_ref().eq(other.as_ref())
    }

    fn ne(&self, other: &Self) -> bool {
        self.as_ref().ne(other.as_ref())
    }
}

impl<T: ?Sized + Send + Sync + Eq> Eq for TarkSend<T> {}

impl<T: ?Sized + Send + Sync + PartialOrd> PartialOrd for TarkSend<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.as_ref().partial_cmp(other.as_ref())
    }

    fn lt(&self, other: &Self) -> bool {
        self.as_ref().lt(other.as_ref())
    }

    fn le(&self, other: &Self) -> bool {
        self.as_ref().le(other.as_ref())
    }

    fn gt(&self, other: &Self) -> bool {
        self.as_ref().gt(other.as_ref())
    }

    fn ge(&self, other: &Self) -> bool {
        self.as_ref().ge(other.as_ref())
    }
}

impl<T: ?Sized + Send + Sync + Ord> Ord for TarkSend<T> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_ref().cmp(other.as_ref())
    }
}

impl<T: ?Sized + Send + Sync> Clone for TarkSend<T> {
    fn clone(&self) -> Self {
        Self::from_raw(self.inner)
    }

    fn clone_from(&mut self, source: &Self) {
        // Skip the refcount traffic when both handles already share an
        // allocation.
        if self.inner != source.inner {
            *self = source.clone();
        }
    }
}

impl<T: ?Sized + Send + Sync> Drop for TarkSend<T> {
    fn drop(&mut self) {
        TarkInner::dec_maybe_drop(self.inner);
    }
}

// SAFE: the payload is `Send + Sync` and the reference count is atomic, so
// the handle can be sent to another thread.
unsafe impl<T: ?Sized + Send + Sync> Send for TarkSend<T> {}

// SAFE: the payload is `Send + Sync` and the reference count is atomic, so
// the handle can be shared between threads.
unsafe impl<T: ?Sized + Send + Sync> Sync for TarkSend<T> {}

impl<T: ?Sized + Send + Sync> AsRef<T> for TarkSend<T> {
    fn as_ref(&self) -> &T {
        // SAFE: `inner` is a live Box allocation, which upholds every
        // invariant `.as_ref()` needs except freedom from mutable aliasing,
        // and we only ever hand out shared references, so it all works out.
        &unsafe { self.inner.as_ref() }.data
    }
}

impl<T: ?Sized + Send + Sync> Borrow<T> for TarkSend<T> {
    fn borrow(&self) -> &T {
        self.as_ref()
    }
}

impl<T: ?Sized + Send + Sync> Deref for TarkSend<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}

impl<T: ?Sized + Send + Sync + Debug> Debug for TarkSend<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        f.debug_tuple("TarkSend")
            .field(&self.as_ref())
            .finish()
    }
}

impl<T: ?Sized + Send + Sync + Display> Display for TarkSend<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        // Format the pointee directly; `Display::fmt(&self, f)` would call
        // this impl again and recurse forever.
        Display::fmt(self.as_ref(), f)
    }
}

impl<T: ?Sized + Send + Sync> Pointer for TarkSend<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        Pointer::fmt(&self.inner, f)
    }
}

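/// A locally reference-counted handle layered over the shared atomic count.
/// Clones within one local group only touch a non-atomic `Cell` counter;
/// the group as a whole holds a single atomic reference.
///
/// A minimal usage sketch (illustrative; assumes the crate is named `tark`):
///
/// ```
/// use tark::Tark;
///
/// let a = Tark::new(vec![1, 2, 3]);
/// let b = a.clone(); // cheap: bumps a `Cell`, not an atomic
/// assert_eq!(Tark::strong_count(&a), 2);
/// assert_eq!(Tark::atomic_count(&a).get(), 1);
/// assert_eq!(b[0], 1);
/// ```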
pub struct Tark<T: ?Sized> {
    inner: NonNull<TarkInner<T>>,
    strong_weak: NonNull<StrongWeak>,
}

pub type TarkLocal<T> = Tark<T>;

impl<T: ?Sized> Tark<T> {
    pub fn new(t: T) -> Self
    where
        T: Sized,
    {
        let inner = alloc_nonnull(TarkInner::new(t));
        Tark {
            inner,
            strong_weak: StrongWeak::alloc(),
        }
    }

    fn strong(this: &Self) -> &Cell<usize> {
        // SAFE: `strong_weak` is a live Box allocation, which upholds every
        // invariant `.as_ref()` needs except freedom from mutable aliasing,
        // and we only ever hand out shared references, so it all works out.
        &unsafe { this.strong_weak.as_ref() }.strong
    }

    fn weak(this: &Self) -> &Cell<usize> {
        // SAFE: as in `strong` above.
        &unsafe { this.strong_weak.as_ref() }.weak
    }

    /// Returns the shared atomic count: one per `TarkSend` handle, plus one
    /// per local group that still has strong handles.
    pub fn atomic_count(this: &Self) -> NonZeroUsize {
        // SAFE: The atomic refcount is guaranteed non-zero while `this` is
        // alive, because this local group holds one atomic reference.
        unsafe { NonZeroUsize::new_unchecked(
            this.inner.as_ref().strong.load(Ordering::Acquire),
        ) }
    }

    pub fn strong_count(this: &Self) -> usize {
        Self::strong(this).get()
    }

    pub fn weak_count(this: &Self) -> usize {
        Self::weak(this).get()
    }

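    /// Creates a [`Weak`] handle in the same local group, incrementing only
    /// the local weak count.
    ///
    /// A minimal sketch (illustrative; assumes the crate is named `tark`):
    ///
    /// ```
    /// use tark::{Tark, Weak};
    ///
    /// let t = Tark::new(7);
    /// let w = Tark::downgrade(&t);
    /// assert_eq!(Tark::weak_count(&t), 1);
    /// assert_eq!(*Weak::upgrade(&w).unwrap(), 7);
    /// ```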
    pub fn downgrade(this: &Self) -> Weak<T> {
        let weak = Self::weak(this);
        weak.set(weak.get() + 1);
        WeakTark {
            inner: this.inner,
            strong_weak: this.strong_weak,
        }
    }

    /// Swaps the values behind two handles. Taking exclusive borrows makes
    /// this a plain swap of the two pointer pairs: no reference counts
    /// change, because each handle keeps the counts it already owns, and no
    /// `&T` borrowed from either handle can be alive while that handle is
    /// mutably borrowed here.
    pub fn swap(this: &mut Self, other: &mut Tark<T>) {
        std::mem::swap(this, other);
    }
}

impl<T: ?Sized + Send + Sync> Tark<T> {
    /// Converts this local handle into a sendable, atomically counted one.
    pub fn sendable(this: Self) -> TarkSend<T> {
        // `from_raw` takes a fresh atomic reference for the new handle;
        // dropping `this` afterwards releases its local count (and the
        // group's atomic reference, if it was the last local handle).
        TarkSend::from_raw(this.inner)
    }
}

impl<T: ?Sized + Hash> Hash for Tark<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.as_ref().hash(state)
    }
}

impl<T: ?Sized + PartialEq> PartialEq for Tark<T> {
    fn eq(&self, other: &Self) -> bool {
        self.as_ref().eq(other.as_ref())
    }

    fn ne(&self, other: &Self) -> bool {
        self.as_ref().ne(other.as_ref())
    }
}

impl<T: ?Sized + Eq> Eq for Tark<T> {}

impl<T: ?Sized + PartialOrd> PartialOrd for Tark<T> {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        self.as_ref().partial_cmp(other.as_ref())
    }

    fn lt(&self, other: &Self) -> bool {
        self.as_ref().lt(other.as_ref())
    }

    fn le(&self, other: &Self) -> bool {
        self.as_ref().le(other.as_ref())
    }

    fn gt(&self, other: &Self) -> bool {
        self.as_ref().gt(other.as_ref())
    }

    fn ge(&self, other: &Self) -> bool {
        self.as_ref().ge(other.as_ref())
    }
}

impl<T: ?Sized + Ord> Ord for Tark<T> {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.as_ref().cmp(other.as_ref())
    }
}

impl<T: ?Sized> Clone for Tark<T> {
    fn clone(&self) -> Self {
        let strong = Self::strong(self);
        strong.set(strong.get() + 1);
        Tark {
            inner: self.inner,
            strong_weak: self.strong_weak,
        }
    }

    fn clone_from(&mut self, source: &Self) {
        // Skip the refcount traffic when both handles already share an
        // allocation.
        if self.inner != source.inner {
            *self = source.clone();
        }
    }
}

impl<T: ?Sized> Drop for Tark<T> {
    fn drop(&mut self) {
        let strong = Self::strong(self);
        let count = strong.get();
        // Decrement before any freeing below: `strong` points into
        // `strong_weak`, which must never be written after it is freed.
        strong.set(count - 1);

        if count == 1 {
            TarkInner::dec_maybe_drop(self.inner);

            if Self::weak_count(self) == 0 {
                // SAFE: `strong_weak` was allocated as a Box, and thus can
                // be dropped as one. No strong or weak handles remain.
                unsafe { dealloc_box(self.strong_weak); }
            }
        }
    }
}

impl<T: ?Sized> AsRef<T> for Tark<T> {
    fn as_ref(&self) -> &T {
        // SAFE: `inner` is a live Box allocation, which upholds every
        // invariant `.as_ref()` needs except freedom from mutable aliasing,
        // and we only ever hand out shared references, so it all works out.
        &unsafe { self.inner.as_ref() }.data
    }
}

impl<T: ?Sized> Borrow<T> for Tark<T> {
    fn borrow(&self) -> &T {
        self.as_ref()
    }
}

impl<T: ?Sized> Deref for Tark<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}

impl<T: ?Sized + Debug> Debug for Tark<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        f.debug_tuple("Tark")
            .field(&self.as_ref())
            .finish()
    }
}

impl<T: ?Sized + Display> Display for Tark<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        // Format the pointee directly; `Display::fmt(&self, f)` would call
        // this impl again and recurse forever.
        Display::fmt(self.as_ref(), f)
    }
}

impl<T: ?Sized> Pointer for Tark<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        Pointer::fmt(&self.inner, f)
    }
}

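/// A weak counterpart to [`Tark`]: it does not keep the value alive, and
/// must be upgraded before use.
///
/// A minimal lifecycle sketch (illustrative; assumes the crate is named
/// `tark`):
///
/// ```
/// use tark::{Tark, Weak};
///
/// let t = Tark::new(String::from("data"));
/// let w = Tark::downgrade(&t);
/// assert!(Weak::upgrade(&w).is_some());
/// drop(t);
/// assert!(Weak::upgrade(&w).is_none());
/// ```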
pub struct WeakTark<T: ?Sized> {
    inner: NonNull<TarkInner<T>>,
    strong_weak: NonNull<StrongWeak>,
}

pub type Weak<T> = WeakTark<T>;

impl<T: ?Sized> Weak<T> {
    fn strong(this: &Self) -> &Cell<usize> {
        // SAFE: `strong_weak` is a live Box allocation, which upholds every
        // invariant `.as_ref()` needs except freedom from mutable aliasing,
        // and we only ever hand out shared references, so it all works out.
        &unsafe { this.strong_weak.as_ref() }.strong
    }

    fn weak(this: &Self) -> &Cell<usize> {
        // SAFE: as in `strong` above.
        &unsafe { this.strong_weak.as_ref() }.weak
    }

    pub fn strong_count(this: &Self) -> usize {
        Self::strong(this).get()
    }

    pub fn weak_count(this: &Self) -> usize {
        Self::weak(this).get()
    }

    pub fn atomic_count(this: &Self) -> Option<NonZeroUsize> {
        if Self::strong_count(this) == 0 {
            None
        } else {
            // SAFE: if the local strong count is non-zero, some `Tark` in
            // this group still holds an atomic reference, so `inner` is
            // still allocated and the atomic count is non-zero.
            Some(unsafe { NonZeroUsize::new_unchecked(
                this.inner.as_ref().strong.load(Ordering::Acquire),
            ) })
        }
    }

    /// Attempts to recover a strong handle in the same local group,
    /// returning `None` once every strong handle has been dropped.
    pub fn upgrade(this: &Self) -> Option<Tark<T>> {
        if Weak::strong_count(this) == 0 {
            None
        } else {
            let strong = Weak::strong(this);
            strong.set(strong.get() + 1);
            Some(Tark {
                inner: this.inner,
                strong_weak: this.strong_weak,
            })
        }
    }
}

impl<T: ?Sized> Clone for Weak<T> {
    fn clone(&self) -> Self {
        let weak = Self::weak(self);
        weak.set(weak.get() + 1);
        Weak {
            inner: self.inner,
            strong_weak: self.strong_weak,
        }
    }

    fn clone_from(&mut self, source: &Self) {
        // Skip the refcount traffic when both handles already share an
        // allocation.
        if self.inner != source.inner {
            *self = source.clone();
        }
    }
}

impl<T: ?Sized> Drop for Weak<T> {
    fn drop(&mut self) {
        let weak = Weak::weak(self);

        if weak.get() == 1 && Weak::strong_count(self) == 0 {
            // SAFE: `strong_weak` was allocated as a Box, and thus can be
            // dropped as one. This was the last weak handle and no strong
            // handles remain.
            unsafe { dealloc_box(self.strong_weak); }
        } else {
            weak.set(weak.get() - 1);
        }
    }
}

impl<T: ?Sized> Pointer for Weak<T> {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        Pointer::fmt(&self.inner, f)
    }
}

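/// Per-group local counts: `strong` is the number of live `Tark` handles in
/// one local group, `weak` the number of live `WeakTark` handles. The group
/// as a whole holds exactly one reference on the shared atomic count in
/// `TarkInner`, released when `strong` reaches zero.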
struct StrongWeak {
    strong: Cell<usize>,
    weak: Cell<usize>,
}

impl StrongWeak {
    fn alloc() -> NonNull<StrongWeak> {
        alloc_nonnull(StrongWeak {
            strong: Cell::new(1),
            weak: Cell::new(0),
        })
    }
}

fn alloc_nonnull<T>(t: T) -> NonNull<T> {
    // SAFE: `Box` itself holds a `Unique`, which is guaranteed non-null, so
    // the raw pointer must be non-null too.
    unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(t))) }
}

/// Reclaims a pointer that was allocated via `alloc_nonnull`, running the
/// pointee's destructor and freeing the allocation. Marked `#[cold]` since
/// callers only reach it on the last-reference path.
#[cold]
unsafe fn dealloc_box<T: ?Sized>(ptr: NonNull<T>) {
    std::mem::drop(Box::from_raw(ptr.as_ptr()))
}
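
// A few smoke tests sketching the intended counting behavior. These are
// illustrative additions, not part of the original API surface.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn send_clone_counts() {
        let a = TarkSend::new(1);
        assert_eq!(TarkSend::atomic_count(&a).get(), 1);
        let b = a.clone();
        assert_eq!(TarkSend::atomic_count(&a).get(), 2);
        assert!(TarkSend::ptr_eq(&a, &b));
        drop(b);
        assert_eq!(TarkSend::atomic_count(&a).get(), 1);
    }

    #[test]
    fn promote_and_send_back() {
        let s = TarkSend::new(String::from("hi"));
        // Promotion transfers the one atomic reference to the local group.
        let local = TarkSend::promote(s);
        assert_eq!(Tark::strong_count(&local), 1);
        assert_eq!(Tark::atomic_count(&local).get(), 1);
        // Converting back takes a second atomic reference for the new
        // sendable handle while the local group keeps its own.
        let s2 = Tark::sendable(local.clone());
        assert_eq!(*s2, "hi");
        assert_eq!(Tark::atomic_count(&local).get(), 2);
    }

    #[test]
    fn weak_upgrade_lifecycle() {
        let t = Tark::new(7);
        let w = Tark::downgrade(&t);
        assert_eq!(Weak::strong_count(&w), 1);
        assert_eq!(*Weak::upgrade(&w).unwrap(), 7);
        drop(t);
        assert!(Weak::upgrade(&w).is_none());
    }
}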