reference_counted/rc.rs

1// This code is adapted from the rust standard library Rc.
2
3use base::alloc::{dealloc, Layout};
4use base::borrow;
5use base::cell::Cell;
6use base::cmp::Ordering;
7use base::convert::{From, AsMut};
8use base::fmt;
9use base::hash::{Hash, Hasher};
10use base::marker::{PhantomData, Unpin};
11use base::mem;
12use base::ops::{Deref, DerefMut};
13use base::ptr::{self, NonNull};
14
15use base::borrow::BorrowMut;
16
17use base::prelude::v1::*;
18
19use smart_pointer::{SmartPointer, IntoMut, SmartPointerMut};
20
21use crate::ReferenceCounted;
22
/// A non-thread-safe reference-counted pointer.
///
/// All handles share one heap `RcBox` holding the value and a strong
/// count; cloning only bumps the count. `NonNull` keeps
/// `Option<Rc<T>>` pointer-sized, and `PhantomData<RcBox<T>>` tells
/// the drop checker that dropping an `Rc` may drop a `T`.
pub struct Rc<T: ?Sized> {
    ptr: NonNull<RcBox<T>>,
    phantom: PhantomData<RcBox<T>>,
}
28
/// Heap payload shared by every `Rc` handle to the same value:
/// the strong reference count plus the user data.
/// `Cell` provides single-threaded interior mutability for the count.
struct RcBox<T: ?Sized> {
    strong: Cell<usize>,
    data: T,
}
33
34impl<T: ?Sized> Rc<T> {
35    fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
36        Self { ptr, phantom: PhantomData }
37    }
38
39    #[inline]
40    fn inner(&self) -> &RcBox<T> {
41        // This unsafety is ok because while this arc is alive we're guaranteed
42        // that the inner pointer is valid.
43        unsafe { self.ptr.as_ref() }
44    }
45
46    fn ptr(&self) -> *mut RcBox<T> {
47        self.ptr.as_ptr()
48    }
49
50    fn ref_count(&self) -> usize {
51        self.inner().strong.get()
52    }
53
54    #[inline]
55    fn inc_strong(&self) {
56        let strong = self.ref_count();
57
58        // We want to abort on overflow instead of dropping the value.
59        // The reference count will never be zero when this is called;
60        // nevertheless, we insert an abort here to hint LLVM at
61        // an otherwise missed optimization.
62        if strong == 0 || strong == usize::MAX {
63            panic!();
64        }
65        self.inner().strong.set(strong + 1);
66    }
67
68    #[inline]
69    fn dec_strong(&self) {
70        self.inner().strong.set(self.ref_count() - 1);
71    }
72}
73
74impl<T: ?Sized> Clone for Rc<T> {
75    /// Makes a clone of the `Rc` pointer.
76    ///
77    /// This creates another pointer to the same allocation, increasing the reference count.
78    #[inline]
79    fn clone(&self) -> Rc<T> {
80        self.inc_strong();
81        Self::from_inner(self.ptr)
82    }
83}
84
85impl<T: ?Sized> Drop for Rc<T> {
86    /// Drops the `Rc`.
87    ///
88    /// This will decrement the reference count.
89    ///
90    /// # Examples
91    ///
92    /// ```
93    /// use std::sync::Rc;
94    ///
95    /// struct Foo;
96    ///
97    /// impl Drop for Foo {
98    ///     fn drop(&mut self) {
99    ///         println!("dropped!");
100    ///     }
101    /// }
102    ///
103    /// let foo  = Rc::new(Foo);
104    /// let foo2 = Rc::clone(&foo);
105    ///
106    /// drop(foo);    // Doesn't print anything
107    /// drop(foo2);   // Prints "dropped!"
108    /// ```
109    #[inline]
110    fn drop(&mut self) {
111        unsafe {
112            self.dec_strong();
113            if self.ref_count() == 0 {
114                // destroy the contained object
115                ptr::drop_in_place(self.ptr.as_mut());
116
117                dealloc(self.ptr().cast(), Layout::for_value(self.ptr.as_ref()));
118            }
119        }
120    }
121}
122
123impl<T: ?Sized> Deref for Rc<T> {
124    type Target = T;
125
126    #[inline]
127    fn deref(&self) -> &T {
128        &self.inner().data
129    }
130}
131
132impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
133    fn borrow(&self) -> &T {
134        &**self
135    }
136}
137
138impl<T: ?Sized> AsRef<T> for Rc<T> {
139    fn as_ref(&self) -> &T {
140        &**self
141    }
142}
143
144impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
145    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
146        fmt::Display::fmt(&**self, f)
147    }
148}
149
150impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
151    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
152        fmt::Debug::fmt(&**self, f)
153    }
154}
155
156impl<T: ?Sized> fmt::Pointer for Rc<T> {
157    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
158        fmt::Pointer::fmt(&(&**self as *const T), f)
159    }
160}
161
162impl<T: ?Sized> SmartPointer<T> for Rc<T> {
163    fn new(data: T) -> Rc<T> where T: Sized {
164        Self::from_inner(
165            Box::leak(Box::new(RcBox { strong: Cell::new(1), data })).into(),
166        )
167    }
168
169    fn try_unwrap(this: Self) -> Result<T, Self> where T: Sized {
170        if Rc::ref_count(&this) == 1 {
171            unsafe {
172                let val = ptr::read(&*this); // copy the contained object
173                dealloc(this.ptr().cast(), Layout::for_value(this.ptr.as_ref()));
174                mem::forget(this);
175                Ok(val)
176            }
177        } else {
178            Err(this)
179        }
180    }
181}
182
183pub struct UniqueRc<T: ?Sized>(Rc<T>);
184
// SAFETY: a `UniqueRc` is the sole handle to its allocation, so the
// non-atomic reference count is never touched from two threads at once.
// NOTE(review): requiring `T: Sync + Send` for `Send` looks stricter
// than strictly necessary — confirm this matches the crate's intent.
unsafe impl<T: ?Sized + Sync + Send> Send for UniqueRc<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for UniqueRc<T> {}
187
188impl<T: ?Sized> Deref for UniqueRc<T> {
189    type Target = T;
190
191    #[inline]
192    fn deref(&self) -> &T {
193        self.0.deref()
194    }
195}
196
197impl<T: ?Sized> borrow::Borrow<T> for UniqueRc<T> {
198    fn borrow(&self) -> &T {
199        self.0.borrow()
200    }
201}
202
203impl<T: ?Sized> AsRef<T> for UniqueRc<T> {
204    fn as_ref(&self) -> &T {
205        self.0.as_ref()
206    }
207}
208
209impl<T: ?Sized + fmt::Display> fmt::Display for UniqueRc<T> {
210    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
211        self.0.fmt(f)
212    }
213}
214
215impl<T: ?Sized + fmt::Debug> fmt::Debug for UniqueRc<T> {
216    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
217        self.0.fmt(f)
218    }
219}
220
221impl<T: ?Sized> fmt::Pointer for UniqueRc<T> {
222    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
223        self.0.fmt(f)
224    }
225}
226
227impl<T: ?Sized> SmartPointer<T> for UniqueRc<T> {
228    fn new(data: T) -> Self where T: Sized {
229        UniqueRc(Rc::new(data))
230    }
231
232    fn try_unwrap(this: Self) -> Result<T, Self> where T: Sized {
233        let this = this.0;
234
235        unsafe {
236            let elem = ptr::read(&this.ptr.as_ref().data);
237            dealloc(this.ptr().cast(), Layout::for_value(this.ptr.as_ref()));
238            mem::forget(this);
239            Ok(elem)
240        }
241    }
242}
243
244
impl<T: ?Sized> DerefMut for UniqueRc<T> {
    /// Mutably borrows the owned value.
    fn deref_mut(&mut self) -> &mut T {
        // SAFETY: the `UniqueRc` invariant (strong count == 1) means no
        // other handle exists that could observe or alias this mutation.
        unsafe { &mut (*self.0.ptr()).data }
    }
}
251
252impl<T: ?Sized> BorrowMut<T> for UniqueRc<T> {
253    fn borrow_mut(&mut self) -> &mut T {
254        &mut **self
255    }
256}
257
258impl<T: ?Sized> AsMut<T> for UniqueRc<T> {
259    fn as_mut(&mut self) -> &mut T {
260        &mut **self
261    }
262}
263
264impl<T: ?Sized> SmartPointerMut<T> for UniqueRc<T> {}
265
266impl<T: ?Sized> Into<Rc<T>> for UniqueRc<T> {
267    fn into(self) -> Rc<T> {
268        self.0
269    }
270}
271
272impl<T: ?Sized> IntoMut<T> for Rc<T> {
273    type MutablePointer = UniqueRc<T>;
274
275    fn can_make_mut(this: &Self) -> bool {
276        this.ref_count() == 1
277    }
278
279    unsafe fn into_mut_unchecked(this: Self) -> Self::MutablePointer {
280        UniqueRc(this)
281    }
282
283    /// Obtain a mutable reference to the wrapped value without performing runtime checks for
284    /// upholding any invariants.
285    ///
286    /// Safety: Calling this is safe if and only if `can_make_mut` returns true.
287    unsafe fn get_mut_unchecked(this: &Self) -> &mut T {
288        // We are careful to *not* create a reference covering the "count" fields, as
289        // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
290        unsafe { &mut (*this.ptr.as_ptr()).data }
291    }
292}
293
294impl<T: ?Sized> ReferenceCounted<T> for Rc<T> {
295    fn reference_count(this: &Self) -> usize {
296        this.ref_count()
297    }
298}
299
300impl<T: Default> Default for Rc<T> {
301    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
302    ///
303    /// # Examples
304    ///
305    /// ```
306    /// use std::sync::Rc;
307    ///
308    /// let x: Rc<i32> = Default::default();
309    /// assert_eq!(*x, 0);
310    /// ```
311    fn default() -> Rc<T> {
312        Rc::new(Default::default())
313    }
314}
315
316impl<T: Default> Default for UniqueRc<T> {
317    /// Creates a new `UniqueRc<T>`, with the `Default` value for `T`.
318    fn default() -> UniqueRc<T> {
319        UniqueRc::new(Default::default())
320    }
321}
322
323impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
324    /// Equality for two `Rc`s.
325    ///
326    /// Two `Rc`s are equal if their inner values are equal, even if they are
327    /// stored in different allocation. This implementation does not check for
328    /// pointer equality.
329    #[inline]
330    fn eq(&self, other: &Rc<T>) -> bool {
331        (**self).eq(&**other)
332    }
333
334    /// Inequality for two `Rc`s.
335    ///
336    /// Two `Rc`s are unequal if their inner values are unequal. This implementation does not
337    /// check for pointer equality.
338    #[inline]
339    fn ne(&self, other: &Rc<T>) -> bool {
340        (**self).ne(&**other)
341    }
342}
343
344impl<T: ?Sized + Eq> Eq for Rc<T> {}
345
346impl<T: ?Sized + PartialEq> PartialEq for UniqueRc<T> {
347    /// Equality for two `UniqueRc`s.
348    ///
349    /// Two `UniqueRc`s are equal if their inner values are equal, even if they are
350    /// stored in different allocation. This implementation does not check for
351    /// pointer equality.
352    #[inline]
353    fn eq(&self, other: &UniqueRc<T>) -> bool {
354        (**self).eq(&**other)
355    }
356
357    /// Inequality for two `Rc`s.
358    ///
359    /// Two `Rc`s are unequal if their inner values are unequal. This implementation does not
360    /// check for pointer equality.
361    #[inline]
362    fn ne(&self, other: &UniqueRc<T>) -> bool {
363        (**self).ne(&**other)
364    }
365}
366
367impl<T: ?Sized + Eq> Eq for UniqueRc<T> {}
368
369impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
370    /// Partial comparison for two `Rc`s.
371    ///
372    /// The two are compared by calling `partial_cmp()` on their inner values.
373    fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
374        (**self).partial_cmp(&**other)
375    }
376
377    /// Less-than comparison for two `Rc`s.
378    ///
379    /// The two are compared by calling `<` on their inner values.
380    fn lt(&self, other: &Rc<T>) -> bool {
381        *(*self) < *(*other)
382    }
383
384    /// 'Less than or equal to' comparison for two `Rc`s.
385    ///
386    /// The two are compared by calling `<=` on their inner values.
387    fn le(&self, other: &Rc<T>) -> bool {
388        *(*self) <= *(*other)
389    }
390
391    /// Greater-than comparison for two `Rc`s.
392    ///
393    /// The two are compared by calling `>` on their inner values.
394    fn gt(&self, other: &Rc<T>) -> bool {
395        *(*self) > *(*other)
396    }
397
398    /// 'Greater than or equal to' comparison for two `Rc`s.
399    ///
400    /// The two are compared by calling `>=` on their inner values.
401    fn ge(&self, other: &Rc<T>) -> bool {
402        *(*self) >= *(*other)
403    }
404}
405
406impl<T: ?Sized + PartialOrd> PartialOrd for UniqueRc<T> {
407    /// Partial comparison for two `UniqueRc`s.
408    ///
409    /// The two are compared by calling `partial_cmp()` on their inner values.
410    fn partial_cmp(&self, other: &UniqueRc<T>) -> Option<Ordering> {
411        (**self).partial_cmp(&**other)
412    }
413
414    /// Less-than comparison for two `UniqueRc`s.
415    ///
416    /// The two are compared by calling `<` on their inner values.
417    fn lt(&self, other: &UniqueRc<T>) -> bool {
418        *(*self) < *(*other)
419    }
420
421    /// 'Less than or equal to' comparison for two `UniqueRc`s.
422    ///
423    /// The two are compared by calling `<=` on their inner values.
424    fn le(&self, other: &UniqueRc<T>) -> bool {
425        *(*self) <= *(*other)
426    }
427
428    /// Greater-than comparison for two `UniqueRc`s.
429    ///
430    /// The two are compared by calling `>` on their inner values.
431    fn gt(&self, other: &UniqueRc<T>) -> bool {
432        *(*self) > *(*other)
433    }
434
435    /// 'Greater than or equal to' comparison for two `UniqueRc`s.
436    ///
437    /// The two are compared by calling `>=` on their inner values.
438    fn ge(&self, other: &UniqueRc<T>) -> bool {
439        *(*self) >= *(*other)
440    }
441}
442
443impl<T: ?Sized + Ord> Ord for Rc<T> {
444    /// Comparison for two `Rc`s.
445    ///
446    /// The two are compared by calling `cmp()` on their inner values.
447    fn cmp(&self, other: &Rc<T>) -> Ordering {
448        (**self).cmp(&**other)
449    }
450}
451
452impl<T: ?Sized + Ord> Ord for UniqueRc<T> {
453    /// Comparison for two `UniqueRc`s.
454    ///
455    /// The two are compared by calling `cmp()` on their inner values.
456    fn cmp(&self, other: &UniqueRc<T>) -> Ordering {
457        (**self).cmp(&**other)
458    }
459}
460
impl<T> From<T> for Rc<T> {
    /// Moves `t` into a new reference-counted allocation.
    fn from(t: T) -> Self {
        Rc::new(t)
    }
}
466
impl<T> From<T> for UniqueRc<T> {
    /// Moves `t` into a new, uniquely-owned allocation.
    fn from(t: T) -> Self {
        UniqueRc::new(t)
    }
}
472
473impl<T: ?Sized + Hash> Hash for Rc<T> {
474    fn hash<H: Hasher>(&self, state: &mut H) {
475        (**self).hash(state)
476    }
477}
478
479impl<T: ?Sized + Hash> Hash for UniqueRc<T> {
480    fn hash<H: Hasher>(&self, state: &mut H) {
481        (**self).hash(state)
482    }
483}
484
// Both handles keep the value behind a stable heap pointer: moving the
// handle never moves the `T`, so pinning imposes no constraint here.
impl<T: ?Sized> Unpin for Rc<T> {}

impl<T: ?Sized> Unpin for UniqueRc<T> {}