without_alloc/rc.rs

//! Reference counter value.
//!
//! See [`Rc`] for more information.
//!
//! [`Rc`]: ./struct.Rc.html
use core::{borrow, cmp, fmt, hash, mem, ops, ptr};
use core::alloc::Layout;
use core::cell::Cell;

use crate::uninit::{Uninit, UninitView};

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference Counted'.
///
/// The inherent methods are all associated functions. This means you can not call them
/// unexpectedly through deref-coercion on the reference itself. Instead, you need to call them as
/// `Rc::try_unwrap(rc)` etc.
///
/// Compared to the standard library version, this will not perform its own allocations. Instead,
/// you can ask [`Bump`] to perform them or manually allocate memory guided by the necessary
/// [`layout`].
///
/// [`Bump`]: ../slab/struct.Bump.html#method.rc
/// [`layout`]: #method.layout
pub struct Rc<'a, T> {
    /// Shared view on the memory of the box.
    ///
    /// It is important **NOT** to safely expose this to the user. The weak counter maintains the
    /// invariant that the pointed-to memory is no longer aliased when the last Rc to that view has
    /// been dropped.
    inner: UninitView<'a, RcBox<T>>,
}

/// A reference-counting pointer to the allocation of an `Rc`.
///
/// ## TODO
///
/// Evaluate an interface:
/// ```ignore
/// fn reinit(&self, val: T) -> Result<Rc<T>, T>;
/// ```
pub struct Weak<'a, T> {
    /// Shared view on the memory of the box.
    ///
    /// The inner `val` of the box may have been de-initialized already. So we must be very careful
    /// to never create an actual reference to the box.
    inner: UninitView<'a, RcBox<T>>,
}

/// A structured container for the boxed value.
///
/// Its representation is chosen such that it can be cast to `Uninit<T>` and from it, given
/// appropriate additional space. All added data is at the end of the allocation, which allows
/// other containers that store the value to reuse the same allocation without shoveling data
/// around.
///
/// That, however, is an implementation detail since we could also `memmove` appropriately. And it
/// falls apart as soon as we take extra alignment requirements into account. Hence, we do not
/// expose it generally and give no guarantees outside the basic conversion. Make this
/// incrementally better.
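///
/// A sketch of that basic conversion (the cast itself is checked by the `layout_box_compatible`
/// test below; the pointer here is hypothetical):
/// ```ignore
/// let boxed: *mut RcBox<T> = /* a previously initialized allocation */;
/// // `val` is the first field and the struct is `#[repr(C)]`, so a pointer
/// // to the box is also a valid pointer to the value.
/// let val: *mut T = boxed as *mut T;
/// ```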
#[repr(C)]
struct RcBox<T> {
    /// Keep this member first!
    ///
    /// Note that `as_mut_ptr` and `into_raw` rely on this.
    val: T,

    /// The number of owners of the value.
    strong: Cell<usize>,

    /// The number of owners of the memory view.
    ///
    /// Note that the strong ownership of the value also counts as a *single* weak ownership. The
    /// last access which drops the value should also decrease the weak count.
    weak: Cell<usize>,
}

impl<'a, T> Rc<'a, T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// See also [`Bump::rc`], which encapsulates the process of allocation and construction in a
    /// single method call.
    ///
    /// ## Panics
    /// This function panics if the memory does not fit the layout returned by [`Rc::layout`].
    ///
    /// ## Examples
    ///
    /// ```
    /// use core::convert::TryInto;
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct Foo(u32);
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let layout = Rc::<Foo>::layout().try_into().unwrap();
    /// let memory = slab.alloc_layout(layout).unwrap();
    /// let rc = Rc::new(Foo(0), memory.uninit);
    /// ```
    ///
    /// [`Rc::layout`]: #method.layout
    /// [`Bump::rc`]: ../slab/struct.Bump.html#method.rc
    pub fn new(val: T, memory: Uninit<'a, ()>) -> Self {
        assert!(memory.fits(Self::layout()), "Provided memory must fit the inner layout");
        let mut memory = memory.cast::<RcBox<T>>().unwrap();

        memory.borrow_mut().init(RcBox {
            val,
            strong: Cell::new(1),
            weak: Cell::new(1),
        });

        Rc {
            inner: memory.into(),
        }
    }

    /// Wrap a raw initialized value back into an `Rc`.
    ///
    /// ## Safety
    /// The block must originate from a previous call to [`into_raw`] and only the value may have
    /// been modified. The value must still be valid.
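    ///
    /// ## Example
    ///
    /// A round-trip sketch, assuming the `Bump` allocator from `static_alloc`:
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(0u32).unwrap();
    ///
    /// let raw = Rc::into_raw(rc).ok().unwrap();
    /// // SAFETY: `raw` comes straight from `into_raw` and the value is untouched.
    /// let rc = unsafe { Rc::from_raw(raw) };
    /// assert_eq!(*rc, 0);
    /// ```
    ///
    /// [`into_raw`]: #method.into_raw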
    pub unsafe fn from_raw(init: Uninit<'a, T>) -> Self {
        debug_assert!(init.fits(Self::layout()), "Provided memory must fit the inner layout");
        let inner = init.cast().unwrap();

        Rc {
            inner: inner.into(),
        }
    }

    /// Try to extract the memory.
    ///
    /// This returns `Ok` only when this is the last strong *and* weak reference to the value.
    /// The contained value will be preserved and is not dropped. Use `from_raw` to reinitialize a
    /// new `Rc` with the old value and memory.
    ///
    /// ## Example
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct HotPotato;
    ///
    /// impl Drop for HotPotato {
    ///     fn drop(&mut self) {
    ///         panic!("dropped!");
    ///     }
    /// }
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let foo = slab.rc(HotPotato).unwrap();
    ///
    /// let raw = Rc::into_raw(foo).ok().unwrap();
    /// // No panic. Value has not been dropped.
    /// ```
    pub fn into_raw(rc: Self) -> Result<Uninit<'a, T>, Self> {
        if !Rc::is_unique(&rc) {
            // Not unique: hand the `Rc` back unchanged. Dropping it later will
            // decrement `strong` as usual.
            return Err(rc);
        }

        let ptr = rc.inner.as_non_null();
        let len = rc.inner.size();
        mem::forget(rc);
        unsafe {
            // SAFETY: restored the memory we just forgot. We are the only reference to it, so it
            // is fine to restore the original unique allocation reference.
            Ok(Uninit::from_memory(ptr.cast(), len).cast().unwrap())
        }
    }

    /// Returns the contained value, if the `Rc` has exactly one strong reference.
    ///
    /// Also returns the managed memory in the form of a `Weak`. This is unusual but the best
    /// choice for potentially recovering it. Returning the memory directly is not possible since
    /// other `Weak<T>` instances may still point to it. If you are not interested in the memory
    /// you can simply drop the `Weak`.
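    ///
    /// ## Example
    ///
    /// A minimal sketch, assuming the `Bump` allocator from `static_alloc`:
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(0u32).unwrap();
    ///
    /// let (val, weak) = Rc::try_unwrap(rc).ok().unwrap();
    /// assert_eq!(val, 0);
    /// // Only our weak handle on the memory remains.
    /// assert_eq!(weak.strong_count(), 0);
    /// assert_eq!(weak.weak_count(), 1);
    /// ```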
    pub fn try_unwrap(rc: Self) -> Result<(T, Weak<'a, T>), Self> {
        if Rc::strong_count(&rc) != 1 {
            return Err(rc);
        }

        rc.dec_strong();
        let val = unsafe { ptr::read(rc.as_ptr()) };

        let weak = Weak { inner: rc.inner };
        mem::forget(rc);

        Ok((val, weak))
    }

    /// Create a new `Weak` pointer to the value.
    ///
    /// The weak pointer shares ownership over the memory but not over the value itself.
    ///
    /// ## Example
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let foo = slab.rc(Foo).unwrap();
    /// let weak = Rc::downgrade(&foo);
    ///
    /// assert_eq!(Rc::weak_count(&foo), 2);
    /// drop(foo);
    ///
    /// assert_eq!(weak.weak_count(), 1);
    /// ```
    pub fn downgrade(rc: &Self) -> Weak<'a, T> {
        rc.inc_weak();
        Weak { inner: rc.inner }
    }
}

impl<T> Rc<'_, T> {
    /// Get the layout for memory passed to [`Rc::new`].
    ///
    /// You should not rely on the value returned here. The two guarantees are: the size of the
    /// layout is at least as large as the input type and it is never empty.
    ///
    /// An `Rc` does not simply point to a lone instance of a type but instead adds some small
    /// metadata (two pointer-sized counters). To keep the implementation details private, this
    /// method allows allocation of properly sized regions without exposing the exact type that
    /// will be stored in the allocation.
    ///
    /// ## Examples
    ///
    /// ```
    /// use without_alloc::rc::Rc;
    ///
    /// struct Foo(u32);
    /// struct Empty;
    ///
    /// assert!(Rc::<Foo>::layout().size() >= 4);
    /// assert!(Rc::<Empty>::layout().size() > 0);
    /// ```
    ///
    /// [`Rc::new`]: #method.new
    pub fn layout() -> Layout {
        // FIXME: this should really be `const` but `Layout` does not offer that yet.
        Layout::new::<RcBox<T>>()
    }

    /// Gets the number of weak pointers to the value.
    ///
    /// Note that all `Rc`s to the same value count as one weak pointer in total.
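    ///
    /// ## Example
    ///
    /// A minimal sketch, assuming the `Bump` allocator from `static_alloc`:
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(0u32).unwrap();
    /// // All strong pointers together count as one weak pointer.
    /// assert_eq!(Rc::weak_count(&rc), 1);
    ///
    /// let weak = Rc::downgrade(&rc);
    /// assert_eq!(Rc::weak_count(&rc), 2);
    /// ```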
    pub fn weak_count(rc: &Self) -> usize {
        rc.inner().weak.get()
    }

    /// Gets the number of strong pointers to the value.
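    ///
    /// ## Example
    ///
    /// A minimal sketch, assuming the `Bump` allocator from `static_alloc`:
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(0u32).unwrap();
    /// assert_eq!(Rc::strong_count(&rc), 1);
    ///
    /// let rc2 = Rc::clone(&rc);
    /// assert_eq!(Rc::strong_count(&rc), 2);
    ///
    /// drop(rc2);
    /// assert_eq!(Rc::strong_count(&rc), 1);
    /// ```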
    pub fn strong_count(rc: &Self) -> usize {
        rc.inner().strong.get()
    }

    /// Try to retrieve a mutable reference to the value.
    ///
    /// This method will only succeed if there are no other pointers to the same value, neither
    /// strong ones nor weak ones.
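    ///
    /// ## Example
    ///
    /// A minimal sketch, assuming the `Bump` allocator from `static_alloc`:
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let mut rc = slab.rc(0u32).unwrap();
    /// *Rc::get_mut(&mut rc).unwrap() = 7;
    ///
    /// // Any other pointer forbids mutable access.
    /// let rc2 = Rc::clone(&rc);
    /// assert!(Rc::get_mut(&mut rc).is_none());
    ///
    /// drop(rc2);
    /// assert_eq!(*Rc::get_mut(&mut rc).unwrap(), 7);
    /// ```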
    pub fn get_mut(rc: &mut Self) -> Option<&mut T> {
        if rc.is_unique() {
            Some(unsafe { &mut *rc.as_mut_ptr() })
        } else {
            None
        }
    }

    /// Check if two `Rc`s point to the same data.
    ///
    /// This will never compare the values but simply inspect the inner pointers.
    ///
    /// ## Example
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    ///
    /// // Two Rc's pointing to the same data.
    /// let foo = slab.rc(Foo).unwrap();
    /// let foo2 = Rc::clone(&foo);
    ///
    /// // An unrelated allocation.
    /// let not_foo = slab.rc(Foo).unwrap();
    ///
    /// assert!( Rc::ptr_eq(&foo, &foo2));
    /// assert!(!Rc::ptr_eq(&foo, &not_foo));
    /// ```
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.inner.as_ptr() == other.inner.as_ptr()
    }

    /// Get a reference to the inner box.
    ///
    /// Note that we must not mutably touch or reference the inner `T` through this reference,
    /// e.g. by casting to mutable pointers.
    fn inner(&self) -> &RcBox<T> {
        unsafe {
            self.inner.as_ref()
        }
    }

    fn is_unique(&self) -> bool {
        Rc::strong_count(self) == 1 && Rc::weak_count(self) == 1
    }

    /// Get the mutable pointer to the value.
    ///
    /// This relies on the layout of the inner struct.
    fn as_mut_ptr(&mut self) -> *mut T {
        // `T` is the first member, #[repr(C)] makes this cast well behaved.
        self.inner.as_ptr() as *mut T
    }

    /// Get the pointer to the value.
    ///
    /// This relies on the layout of the inner struct.
    fn as_ptr(&self) -> *const T {
        self.inner.as_ptr() as *const T
    }

    fn inc_strong(&self) {
        let val = Self::strong_count(self) + 1;
        self.inner().strong.set(val);
    }

    fn dec_strong(&self) {
        let val = Self::strong_count(self) - 1;
        self.inner().strong.set(val);
    }

    fn inc_weak(&self) {
        let val = Self::weak_count(self) + 1;
        self.inner().weak.set(val);
    }

    fn dec_weak(&self) {
        let val = Self::weak_count(self) - 1;
        self.inner().weak.set(val);
    }
}

impl<'a, T> Weak<'a, T> {
    /// Try to unwrap the original allocation of the `Rc`.
    ///
    /// This will only work when this is the only pointer to the allocation. That is, no other
    /// `Weak` and no `Rc` is still pointing at it.
    ///
    /// ## Example
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(Foo).unwrap();
    /// let (_, weak) = Rc::try_unwrap(rc).ok().unwrap();
    ///
    /// // This is the only one pointing at the allocation.
    /// let memory = weak.try_unwrap().ok().unwrap();
    /// ```
    pub fn try_unwrap(self) -> Result<Uninit<'a, ()>, Self> {
        if !self.is_unique_to_rc_memory() {
            return Err(self);
        }

        let ptr = self.inner.as_non_null();
        let len = self.inner.size();
        unsafe {
            // SAFETY: restored the memory that an `Rc` originally provided to the `Weak`. We are
            // the only reference to it, so it is fine to restore the original unique allocation
            // reference.
            Ok(Uninit::from_memory(ptr.cast(), len))
        }
    }

    /// Attempt to upgrade to a shared pointer to the value.
    ///
    /// This operation will only succeed if there are still strong pointers to the value, i.e.
    /// `strong_count` is not zero. Then the value has not been dropped yet and its lifetime is
    /// extended.
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// let memory: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = memory.rc(0usize).unwrap();
    ///
    /// let weak = Rc::downgrade(&rc);
    /// let rc2 = weak.upgrade().unwrap();
    ///
    /// drop(rc);
    /// drop(rc2);
    ///
    /// // No more strong pointers left.
    /// assert!(weak.upgrade().is_none());
    /// ```
    pub fn upgrade(&self) -> Option<Rc<'a, T>> {
        if self.strong_count() == 0 {
            None
        } else {
            let rc = Rc { inner: self.inner };
            rc.inc_strong();
            Some(rc)
        }
    }
}

impl<T> Weak<'_, T> {
    /// Gets the number of strong pointers pointing at the value.
    ///
    /// ## Example
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc, rc::Weak};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(Foo).unwrap();
    /// let (_, weak) = Rc::try_unwrap(rc).ok().unwrap();
    ///
    /// // We just destroyed the only strong one.
    /// assert_eq!(Weak::strong_count(&weak), 0);
    /// ```
    pub fn strong_count(&self) -> usize {
        self.strong().get()
    }

    /// Gets the number of weak pointers pointing at the value.
    ///
    /// ## Example
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc, rc::Weak};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(Foo).unwrap();
    /// let (_, weak) = Rc::try_unwrap(rc).ok().unwrap();
    ///
    /// // This is the only one pointing at the allocation.
    /// assert_eq!(Weak::weak_count(&weak), 1);
    /// ```
    pub fn weak_count(&self) -> usize {
        self.weak().get()
    }

    fn is_unique_to_rc_memory(&self) -> bool {
        self.strong_count() == 0 && self.weak_count() == 1
    }

    /// Get a reference to the weak counter.
    ///
    /// Avoids potential UB: never creates a reference to the potentially dead `val`.
    fn weak(&self) -> &Cell<usize> {
        unsafe { &(*self.inner.as_ptr()).weak }
    }

    /// Get a reference to the strong counter.
    ///
    /// Avoids potential UB: never creates a reference to the potentially dead `val`.
    fn strong(&self) -> &Cell<usize> {
        unsafe { &(*self.inner.as_ptr()).strong }
    }

    fn inc_weak(&self) {
        let val = Weak::weak_count(self);
        self.weak().set(val + 1);
    }

    fn dec_weak(&self) {
        let val = Weak::weak_count(self);
        self.weak().set(val - 1);
    }
}

impl<T> Drop for Rc<'_, T> {
    /// Drops the `Rc`.
    ///
    /// This will decrement the strong reference count. If the strong reference
    /// count reaches zero then the only other references (if any) are
    /// [`Weak`], so we `drop` the inner value.
    ///
    /// # Examples
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    ///
    /// let foo  = slab.rc(Foo).unwrap();
    /// let foo2 = Rc::clone(&foo);
    ///
    /// drop(foo);    // Doesn't print anything
    /// drop(foo2);   // Prints "dropped!"
    /// ```
    fn drop(&mut self) {
        self.dec_strong();
        // The weak count reaching zero has no effect of its own: we never deallocate the memory.
        if Rc::strong_count(self) == 0 {
            self.dec_weak();

            unsafe {
                ptr::drop_in_place(self.as_mut_ptr())
            }
        }
    }
}

impl<T> ops::Deref for Rc<'_, T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.inner().val
    }
}

impl<T> Clone for Rc<'_, T> {
    /// Clone the `Rc`.
    ///
    /// This will increment the strong reference count. Only an `Rc` that is the unique pointer
    /// to its value can unwrap it or hand out a mutable reference.
    ///
    /// # Examples
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    ///
    /// let mut foo  = slab.rc(Foo).unwrap();
    /// assert!(Rc::get_mut(&mut foo).is_some());
    ///
    /// let foo2 = Rc::clone(&foo);
    /// assert!(Rc::get_mut(&mut foo).is_none());
    /// ```
    fn clone(&self) -> Self {
        self.inc_strong();
        Rc {
            inner: self.inner,
        }
    }
}

impl<T> Drop for Weak<'_, T> {
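    /// Drops the `Weak`.
    ///
    /// This only decrements the weak reference count. A minimal sketch, assuming the `Bump`
    /// allocator from `static_alloc`:
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let rc = slab.rc(0u32).unwrap();
    ///
    /// let weak = Rc::downgrade(&rc);
    /// assert_eq!(Rc::weak_count(&rc), 2);
    ///
    /// drop(weak);
    /// assert_eq!(Rc::weak_count(&rc), 1);
    /// ```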
    fn drop(&mut self) {
        self.dec_weak();
        // It doesn't matter what happens to the memory.
    }
}

impl<T> Clone for Weak<'_, T> {
    /// Clone the `Weak`.
    ///
    /// This will increment the weak reference count.
    ///
    /// # Examples
    ///
    /// ```
    /// use without_alloc::{alloc::LocalAllocLeakExt, rc::Rc};
    /// use static_alloc::Bump;
    ///
    /// struct Foo;
    ///
    /// let slab: Bump<[u8; 1024]> = Bump::uninit();
    /// let foo = slab.rc(Foo).unwrap();
    ///
    /// let (_, weak) = Rc::try_unwrap(foo).ok().unwrap();
    /// assert_eq!(weak.weak_count(), 1);
    ///
    /// let weak2 = weak.clone();
    /// assert_eq!(weak.weak_count(), 2);
    /// assert_eq!(weak2.weak_count(), 2);
    /// ```
    fn clone(&self) -> Self {
        self.inc_weak();
        Weak {
            inner: self.inner,
        }
    }
}

impl<'a, 'b, T: PartialEq> PartialEq<Rc<'b, T>> for Rc<'a, T> {
    #[inline]
    fn eq(&self, other: &Rc<T>) -> bool {
        PartialEq::eq(&**self, &**other)
    }
    #[inline]
    fn ne(&self, other: &Rc<T>) -> bool {
        PartialEq::ne(&**self, &**other)
    }
}

impl<T: Eq> Eq for Rc<'_, T> { }

impl<'a, 'b, T: PartialOrd> PartialOrd<Rc<'b, T>> for Rc<'a, T> {
    #[inline]
    fn partial_cmp(&self, other: &Rc<T>) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
    #[inline]
    fn lt(&self, other: &Rc<T>) -> bool {
        PartialOrd::lt(&**self, &**other)
    }
    #[inline]
    fn le(&self, other: &Rc<T>) -> bool {
        PartialOrd::le(&**self, &**other)
    }
    #[inline]
    fn ge(&self, other: &Rc<T>) -> bool {
        PartialOrd::ge(&**self, &**other)
    }
    #[inline]
    fn gt(&self, other: &Rc<T>) -> bool {
        PartialOrd::gt(&**self, &**other)
    }
}

impl<T: Ord> Ord for Rc<'_, T> {
    #[inline]
    fn cmp(&self, other: &Rc<T>) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}

impl<T: hash::Hash> hash::Hash for Rc<'_, T> {
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        (**self).hash(state)
    }
}

impl<T: fmt::Display> fmt::Display for Rc<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}

impl<T: fmt::Debug> fmt::Debug for Rc<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T> fmt::Pointer for Rc<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&self.as_ptr(), f)
    }
}

impl<T> borrow::Borrow<T> for Rc<'_, T> {
    fn borrow(&self) -> &T {
        &**self
    }
}

impl<T> AsRef<T> for Rc<'_, T> {
    fn as_ref(&self) -> &T {
        &**self
    }
}

#[cfg(test)]
mod tests {
    use core::alloc::Layout;
    use core::cell::Cell;

    use super::{RcBox, Rc, Weak};
    use static_alloc::Bump;
    use crate::alloc::LocalAllocLeakExt;

    #[test]
    fn layout_box_compatible() {
        let mut boxed = RcBox {
            val: 0usize,
            strong: Cell::new(1),
            weak: Cell::new(1),
        };

        let box_ptr = &mut boxed as *mut RcBox<usize>;
        let val_ptr = box_ptr as *const usize;
        assert_eq!(unsafe { *val_ptr }, 0);

        unsafe { (*box_ptr).val = 0xdeadbeef };
        assert_eq!(unsafe { *val_ptr }, 0xdeadbeef);
    }

    #[test]
    fn control_through_counters() {
        struct Duck;
        struct NeverDrop;

        impl Drop for NeverDrop {
            fn drop(&mut self) {
                panic!("dropped!");
            }
        }

        let slab: Bump<[u8; 1024]> = Bump::uninit();
        let rc = slab.rc(NeverDrop).unwrap();
        rc.inc_strong();
        drop(rc);

        let mut rc = slab.rc(Duck).unwrap();
        assert_eq!(rc.as_mut_ptr() as *const u8, rc.inner.as_ptr() as *const u8);
        assert_eq!(rc.as_ptr() as *const u8, rc.inner.as_ptr() as *const u8);

        let rc = slab.rc(Duck).unwrap();
        // Forbidden in public, but we do not grab mutable references.
        let inner = rc.inner;
        drop(rc);
        unsafe {
            assert_eq!((*inner.as_ptr()).strong.get(), 0);
            assert_eq!((*inner.as_ptr()).weak.get(), 0);
        }

        let rc = slab.rc(Duck).unwrap();
        let (_, weak) = Rc::try_unwrap(rc).ok().unwrap();
        assert_eq!(Weak::strong_count(&weak), 0);
        assert_eq!(Weak::weak_count(&weak), 1);
        let inner = weak.inner;
        drop(weak);
        unsafe {
            assert_eq!((*inner.as_ptr()).strong.get(), 0);
            assert_eq!((*inner.as_ptr()).weak.get(), 0);
        }
    }

    #[test]
    #[should_panic = "inner layout"]
    fn wrong_layout_panics() {
        use core::convert::TryInto;

        struct Foo(u32);

        let slab: Bump<[u8; 1024]> = Bump::uninit();
        let layout = Layout::new::<Foo>().try_into().unwrap();
        let wrong_alloc = slab.alloc_layout(layout).unwrap();

        let _ = Rc::new(Foo(0), wrong_alloc.uninit);
    }
}