darc/
lib.rs

//! Dynamically-atomic reference-counting pointers.
//!
//! This is a proof of concept of a Rust `Rc<T>` type that can *dynamically* choose
//! whether to use atomic access to update its reference count. A related `Arc<T>`
//! can be created which offers thread-safe (`Send + Sync`) access to the same
//! data. If there's never an `Arc`, the `Rc` never pays the price for atomics.
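//!
//! A sketch of the intended round trip (crate name assumed from the repo, `darc`):
//!
//! ```
//! use darc::{Arc, Rc};
//!
//! let rc = Rc::new(42); // non-atomic counting
//! let arc = Arc::from_rc(rc); // every count update becomes atomic
//! let rc = Rc::from_arc(arc); // still atomic...
//! assert!(Rc::unshare(&rc)); // ...until proven unique again
//! assert_eq!(*rc, 42);
//! ```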

use std::borrow::Borrow;
use std::cell::{Cell, UnsafeCell};
use std::cmp;
use std::fmt;
use std::hash;
use std::isize;
use std::marker::PhantomData;
use std::ops::Deref;
use std::panic;
use std::process::abort;
use std::ptr::NonNull;
use std::sync::atomic::{self, AtomicUsize, Ordering};

/// A soft limit on the amount of references that may be made to an `Arc`.
///
/// Going above this limit will abort your program, though not necessarily
/// at exactly `MAX_REFCOUNT + 1` references.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;

/// The reference count, in one of two modes.
enum Count {
    /// Unsynchronized count, used while the pointer has only ever been an `Rc`.
    Single(Cell<usize>),
    /// Atomic count, used once an `Arc` has been created (until `Rc::unshare`).
    Multi(AtomicUsize),
}

/// The heap allocation shared by all `Rc`/`Arc` handles to the same data.
struct Inner<T: ?Sized> {
    count: UnsafeCell<Count>,
    data: T,
}

impl<T> Inner<T> {
    fn new(data: T) -> Box<Self> {
        Box::new(Self {
            // New allocations always start in single-threaded mode.
            count: Count::Single(1.into()).into(),
            data,
        })
    }
}

impl<T: ?Sized> Inner<T> {
    /// Switches the count to atomic (`Multi`) mode.
    ///
    /// Safety: the caller must ensure no other thread can touch the count,
    /// which holds here because a `Single` count implies no `Arc` has ever
    /// been created from this allocation.
    unsafe fn make_multi_threaded(&self) {
        let count = match &*self.count.get() {
            Count::Single(cell) => cell.get(),
            Count::Multi(_) => return,
        };
        // We're single-threaded, so we can safely do an unsynchronized write.
        *self.count.get() = Count::Multi(count.into());
    }

    /// Attempts to switch the count back to unsynchronized (`Single`) mode.
    ///
    /// Safety: the caller must hold a reference; if the count is exactly 1,
    /// that reference is the only one, so no other thread can race the write.
    unsafe fn make_single_threaded(&self) -> bool {
        let count = match &*self.count.get() {
            Count::Single(_) => return true,
            Count::Multi(atom) => atom.load(Ordering::SeqCst),
        };
        if count == 1 {
            // We're the sole owner, so we can safely do an unsynchronized write.
            *self.count.get() = Count::Single(count.into());
            true
        } else {
            false
        }
    }

    fn increment(&self) -> usize {
        unsafe {
            let count = match &*self.count.get() {
                Count::Single(cell) => {
                    let count = cell.get() + 1;
                    cell.set(count);
                    count
                }
                // Relaxed is sufficient for increments, as in `std::sync::Arc`:
                // a new reference can only be created from an existing one.
                Count::Multi(atom) => atom.fetch_add(1, Ordering::Relaxed) + 1,
            };
            if count > MAX_REFCOUNT {
                abort();
            }
            count
        }
    }

    fn decrement(&self) -> usize {
        unsafe {
            match &*self.count.get() {
                Count::Single(cell) => {
                    let count = cell.get() - 1;
                    cell.set(count);
                    count
                }
                Count::Multi(atom) => {
                    // Release on the decrement plus an Acquire fence on the
                    // final reference mirrors `std::sync::Arc`, ordering all
                    // prior accesses before the data is dropped.
                    let count = atom.fetch_sub(1, Ordering::Release) - 1;
                    if count == 0 {
                        atomic::fence(Ordering::Acquire);
                    }
                    count
                }
            }
        }
    }
}

/// A reference-counted pointer. 'Rc' stands for 'Reference Counted'.
///
/// This may or may not use atomic access for the reference count, depending on whether it is ever
/// converted to an `Arc`.
pub struct Rc<T: ?Sized> {
    inner: NonNull<Inner<T>>,
    phantom: PhantomData<Inner<T>>,
}

impl<T: ?Sized> Rc<T> {
    fn inner(&self) -> &Inner<T> {
        unsafe { self.inner.as_ref() }
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// This is initially single-threaded, so updates to the reference count will use non-atomic
    /// access. If an `Arc` is ever created from this instance, this will cause *all* of its
    /// references to start using atomic access to the reference count.
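    ///
    /// A usage sketch (crate name assumed from the repo, `darc`):
    ///
    /// ```
    /// use darc::Rc;
    ///
    /// let five = Rc::new(5);
    /// assert_eq!(*five, 5);
    /// ```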
    pub fn new(data: T) -> Self {
        Self {
            // FIXME: use `Box::into_raw_non_null` when stable
            inner: unsafe { NonNull::new_unchecked(Box::into_raw(Inner::new(data))) },
            phantom: PhantomData,
        }
    }

    /// Converts an `Arc<T>` to `Rc<T>`. This does not change the counting mode: the count stays
    /// atomic until `Rc::unshare` succeeds.
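    ///
    /// A usage sketch (crate name assumed from the repo, `darc`):
    ///
    /// ```
    /// use darc::{Arc, Rc};
    ///
    /// let rc = Rc::from_arc(Arc::new("hi"));
    /// assert_eq!(*rc, "hi");
    /// ```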
    pub fn from_arc(arc: Arc<T>) -> Self {
        arc.inner
    }

    /// Attempts to convert this to an unsynchronized pointer, no longer atomic. Returns `true` on
    /// success, or `false` if there may still be references on other threads.
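    ///
    /// A usage sketch (crate name assumed from the repo, `darc`):
    ///
    /// ```
    /// use darc::{Arc, Rc};
    ///
    /// let rc = Rc::from_arc(Arc::new(1));
    /// let rc2 = Rc::clone(&rc);
    /// assert!(!Rc::unshare(&rc)); // `rc2` still shares the atomic count
    /// drop(rc2);
    /// assert!(Rc::unshare(&rc)); // now unique: back to non-atomic counting
    /// ```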
    pub fn unshare(this: &Self) -> bool {
        unsafe { this.inner().make_single_threaded() }
    }
}

impl<T: ?Sized> Clone for Rc<T> {
    fn clone(&self) -> Self {
        self.inner().increment();
        // `inner` and `phantom` are both `Copy`, so this just copies the pointer.
        Self { ..*self }
    }
}

impl<T: ?Sized> Drop for Rc<T> {
    fn drop(&mut self) {
        if self.inner().decrement() == 0 {
            drop(unsafe { Box::from_raw(self.inner.as_ptr()) });
        }
    }
}

impl<T: ?Sized> Deref for Rc<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.inner().data
    }
}

impl<T: ?Sized> AsRef<T> for Rc<T> {
    fn as_ref(&self) -> &T {
        &**self
    }
}

impl<T: ?Sized> Borrow<T> for Rc<T> {
    fn borrow(&self) -> &T {
        &**self
    }
}

impl<T> From<T> for Rc<T> {
    fn from(data: T) -> Self {
        Rc::new(data)
    }
}

impl<T: Default> Default for Rc<T> {
    fn default() -> Self {
        Rc::new(T::default())
    }
}

impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T: ?Sized> fmt::Pointer for Rc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&**self as *const T), f)
    }
}

impl<T: ?Sized + hash::Hash> hash::Hash for Rc<T> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
    fn eq(&self, other: &Self) -> bool {
        **self == **other
    }

    fn ne(&self, other: &Self) -> bool {
        **self != **other
    }
}

impl<T: ?Sized + Eq> Eq for Rc<T> {}

impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        T::partial_cmp(&**self, &**other)
    }

    fn lt(&self, other: &Self) -> bool {
        **self < **other
    }

    fn le(&self, other: &Self) -> bool {
        **self <= **other
    }

    fn gt(&self, other: &Self) -> bool {
        **self > **other
    }

    fn ge(&self, other: &Self) -> bool {
        **self >= **other
    }
}

impl<T: ?Sized + Ord> Ord for Rc<T> {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        T::cmp(&**self, &**other)
    }
}

impl<T: panic::RefUnwindSafe + ?Sized> panic::UnwindSafe for Rc<T> {}

/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically Reference Counted'.
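///
/// Unlike `Rc`, an `Arc` can be shared across threads (a brief sketch, crate name
/// assumed from the repo, `darc`):
///
/// ```
/// use darc::Arc;
///
/// let a = Arc::new(vec![1, 2, 3]);
/// let b = Arc::clone(&a);
/// std::thread::spawn(move || assert_eq!(b.len(), 3)).join().unwrap();
/// assert_eq!(a.len(), 3);
/// ```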
pub struct Arc<T: ?Sized> {
    inner: Rc<T>,
}

// NB: the inner count **must** be the synchronized `Count::Multi`!
unsafe impl<T: Send + Sync + ?Sized> Send for Arc<T> {}
unsafe impl<T: Send + Sync + ?Sized> Sync for Arc<T> {}

impl<T> Arc<T> {
    /// Constructs a new `Arc<T>`.
    pub fn new(data: T) -> Self {
        Arc::from_rc(Rc::new(data))
    }

    /// Converts an `Rc<T>` to `Arc<T>`. This switches the reference count to atomic access,
    /// affecting *all* outstanding `Rc<T>` references to the same underlying object.
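    ///
    /// A usage sketch (crate name assumed from the repo, `darc`):
    ///
    /// ```
    /// use darc::{Arc, Rc};
    ///
    /// let rc = Rc::new(0);
    /// let rc2 = Rc::clone(&rc);
    /// let arc = Arc::from_rc(rc);
    /// // `rc2` now updates the same count atomically.
    /// assert_eq!(*arc, *rc2);
    /// ```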
    pub fn from_rc(rc: Rc<T>) -> Self {
        unsafe { rc.inner().make_multi_threaded() };
        Self { inner: rc }
    }
}

impl<T: ?Sized> Clone for Arc<T> {
    fn clone(&self) -> Self {
        Self {
            inner: Rc::clone(&self.inner),
        }
    }
}

impl<T: ?Sized> Deref for Arc<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &*self.inner
    }
}

impl<T: ?Sized> AsRef<T> for Arc<T> {
    fn as_ref(&self) -> &T {
        &**self
    }
}

impl<T: ?Sized> Borrow<T> for Arc<T> {
    fn borrow(&self) -> &T {
        &**self
    }
}

impl<T> From<T> for Arc<T> {
    fn from(data: T) -> Self {
        Arc::new(data)
    }
}

impl<T: Default> Default for Arc<T> {
    fn default() -> Self {
        Arc::new(T::default())
    }
}

impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T: ?Sized> fmt::Pointer for Arc<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&**self as *const T), f)
    }
}

impl<T: ?Sized + hash::Hash> hash::Hash for Arc<T> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
    fn eq(&self, other: &Self) -> bool {
        **self == **other
    }

    fn ne(&self, other: &Self) -> bool {
        **self != **other
    }
}

impl<T: ?Sized + Eq> Eq for Arc<T> {}

impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        T::partial_cmp(&**self, &**other)
    }

    fn lt(&self, other: &Self) -> bool {
        **self < **other
    }

    fn le(&self, other: &Self) -> bool {
        **self <= **other
    }

    fn gt(&self, other: &Self) -> bool {
        **self > **other
    }

    fn ge(&self, other: &Self) -> bool {
        **self >= **other
    }
}

impl<T: ?Sized + Ord> Ord for Arc<T> {
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        T::cmp(&**self, &**other)
    }
}

impl<T: panic::RefUnwindSafe + ?Sized> panic::UnwindSafe for Arc<T> {}