concurrent_pool/
entry.rs

1use std::ptr;
2use std::sync::Arc;
3use std::sync::atomic::Ordering::*;
4use std::{ops::Deref, ptr::NonNull, sync::atomic::AtomicUsize};
5
6use crate::Pool;
7
/// An entry in the pool.
///
/// `Entry` holds a reference-counted pointer ([`Prc`]) to an item from the
/// pool and a borrowed reference to the [`Pool`] it came from.
/// When the last `Entry` referring to the item is dropped, the item is
/// returned to the pool for reuse.
///
pub struct Entry<'a, T: Default> {
    // When the last reference is dropped, the item is returned to the pool.
    // `item` is always `Some` before the last reference is dropped; `Drop`
    // `take`s it out so it can be handed back to the pool.
    pub(crate) item: Option<Prc<T>>,
    // The pool this item was checked out of; `recycle` is called on it
    // from `Drop`.
    pub(crate) pool: &'a Pool<T>,
}
20
21impl<'a, T: Default> Clone for Entry<'a, T> {
22    /// Makes a clone of the `Entry` that points to the same allocation.
23    fn clone(&self) -> Self {
24        Self {
25            item: self.item.clone(),
26            pool: self.pool,
27        }
28    }
29}
30
impl<'a, T: Default> Drop for Entry<'a, T> {
    /// Decrements the item's reference count; if this was the last live
    /// reference, moves the item back into the pool.
    fn drop(&mut self) {
        // `dec_ref` returns the *previous* count, so `== 1` identifies the
        // final reference. `item` is `Some` for every live `Entry`, so the
        // `unwrap` below cannot fail once `is_some_and` returned true.
        if self.item.as_ref().is_some_and(|i| i.dec_ref() == 1) {
            // This was the last reference, return to the pool.
            let item = self.item.take().unwrap();
            self.pool.recycle(item);
        }
    }
}
40
41impl<'a, T: Default> Deref for Entry<'a, T> {
42    type Target = T;
43    fn deref(&self) -> &Self::Target {
44        self.item.as_ref().unwrap()
45    }
46}
47
48impl<'a, T: Default> Entry<'a, T> {
49    /// Get reference to the inner item.
50    pub fn get(&self) -> &T {
51        &self
52    }
53
54    /// Get mutable reference to the inner item if there are no other references.
55    /// Otherwise, return `None`.
56    pub fn get_mut(&mut self) -> Option<&mut T> {
57        Prc::get_mut(self.item.as_mut().unwrap())
58    }
59
60    /// Get mutable reference to the inner item without checking for other references.
61    ///
62    pub unsafe fn get_mut_unchecked(&mut self) -> &mut T {
63        unsafe { Prc::get_mut_unchecked(self.item.as_mut().unwrap()) }
64    }
65}
66
/// An owned entry in the pool.
///
/// `OwnedEntry` holds a reference-counted pointer ([`Prc`]) to an item from
/// the pool and an `Arc` reference to the [`Pool`], so unlike [`Entry`] it
/// carries no borrow of the pool.
/// When the last `OwnedEntry` referring to the item is dropped, the item is
/// returned to the pool.
///
pub struct OwnedEntry<T: Default> {
    // When the last reference is dropped, the item is returned to the pool.
    // `item` is always `Some` before the last reference is dropped; `Drop`
    // `take`s it out so it can be handed back to the pool.
    pub(crate) item: Option<Prc<T>>,
    // Shared ownership keeps the pool alive at least as long as this entry;
    // `recycle` is called on it from `Drop`.
    pub(crate) pool: Arc<Pool<T>>,
}
79
80impl<T: Default> Clone for OwnedEntry<T> {
81    /// Makes a clone of the `OwnedEntry` that points to the same allocation.
82    fn clone(&self) -> Self {
83        Self {
84            item: self.item.clone(),
85            pool: self.pool.clone(),
86        }
87    }
88}
89
impl<T: Default> Drop for OwnedEntry<T> {
    /// Decrements the item's reference count; if this was the last live
    /// reference, moves the item back into the pool.
    fn drop(&mut self) {
        // `dec_ref` returns the *previous* count, so `== 1` identifies the
        // final reference. `item` is `Some` for every live `OwnedEntry`, so
        // the `unwrap` below cannot fail once `is_some_and` returned true.
        if self.item.as_ref().is_some_and(|i| i.dec_ref() == 1) {
            // This was the last reference, return to the pool.
            let item = self.item.take().unwrap();
            self.pool.recycle(item);
        }
    }
}
99
100impl<T: Default> Deref for OwnedEntry<T> {
101    type Target = T;
102    fn deref(&self) -> &Self::Target {
103        self.item.as_ref().unwrap()
104    }
105}
106
107impl<T: Default> OwnedEntry<T> {
108    /// Get reference to the inner item.
109    pub fn get(&self) -> &T {
110        &self
111    }
112
113    /// Get mutable reference to the inner item if there are no other references.
114    /// Otherwise, return `None`.
115    pub fn get_mut(&mut self) -> Option<&mut T> {
116        Prc::get_mut(self.item.as_mut().unwrap())
117    }
118
119    /// Get mutable reference to the inner item without checking for other references.
120    ///
121    pub unsafe fn get_mut_unchecked(&mut self) -> &mut T {
122        unsafe { Prc::get_mut_unchecked(self.item.as_mut().unwrap()) }
123    }
124}
125
/// A thread-safe reference-counting pointer. `Prc` stands for 'Pooled
/// Reference Counted'. This is like `Arc`, but only used in the pool
/// implemented in this crate.
///
/// **Note**: `Drop` is not implemented for `Prc<T>`. The user should carefully
/// manage the memory of `Prc<T>`. The user should call `drop_slow` when the
/// last reference is dropped.
///
/// # Thread Safety
///
/// `Prc<T>` uses atomic operations for its reference counting. This means that
/// it is thread-safe.
///
/// # Cloning references
///
/// Creating a new reference from an existing reference-counted pointer is done using the
/// `Clone` trait implemented for [`Prc<T>`][Prc].
///
pub(crate) struct Prc<T: ?Sized> {
    // Pointer to the shared count+data allocation created by
    // `Prc::new`/`Prc::new_zero`. Never null; since `Prc` has no `Drop`,
    // the allocation is only ever reclaimed manually (see `drop_slow`).
    ptr: NonNull<PrcInner<T>>,
}
147
// SAFETY: like `Arc<T>`, a `Prc<T>` only hands out shared `&T` access from
// multiple threads, so requiring `T: Send + Sync` makes sending/sharing the
// handle itself sound.
unsafe impl<T: ?Sized + Send + Sync> Send for Prc<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for Prc<T> {}
150
151impl<T: ?Sized> Deref for Prc<T> {
152    type Target = T;
153    fn deref(&self) -> &Self::Target {
154        &self.inner().data
155    }
156}
157
impl<T: ?Sized> Clone for Prc<T> {
    // Bumps the shared reference count, then copies the pointer.
    //
    // NOTE(review): unlike `Arc::clone`, there is no guard against
    // reference-count overflow here; `Arc` aborts defensively when the
    // count nears `usize::MAX` — consider doing the same.
    fn clone(&self) -> Self {
        self.inc_ref();
        Self { ptr: self.ptr }
    }
}
164
165impl<T> Prc<T> {
166    /// Starting the pointer count as 0 which means it is in the pool without
167    /// any clone instance.
168    #[inline]
169    pub(crate) fn new_zero(data: T) -> Self {
170        let x: Box<_> = Box::new(PrcInner {
171            count: AtomicUsize::new(0),
172            data,
173        });
174        Self {
175            ptr: Box::leak(x).into(),
176        }
177    }
178
179    /// Create a new `Prc<T>` with the reference count starting at 1.
180    #[inline]
181    pub(crate) fn new(data: T) -> Self {
182        let x: Box<_> = Box::new(PrcInner {
183            count: AtomicUsize::new(1),
184            data,
185        });
186        Self {
187            ptr: Box::leak(x).into(),
188        }
189    }
190}
191
192impl<T: ?Sized> Prc<T> {
193    /// Increase the reference count and return the previous count.
194    #[inline]
195    pub(crate) fn inc_ref(&self) -> usize {
196        self.inner().count.fetch_add(1, Relaxed)
197    }
198
199    /// Decrease the reference count and return the previous count.
200    #[inline]
201    pub(crate) fn dec_ref(&self) -> usize {
202        self.inner().count.fetch_sub(1, Release)
203    }
204
205    /// Drops the inner data.
206    pub(crate) unsafe fn drop_slow(&self) {
207        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
208    }
209
210    #[inline]
211    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
212        unsafe { &mut (*this.ptr.as_ptr()).data }
213    }
214
215    #[inline]
216    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
217        // Only one reference exists or in the pool.
218        if this.inner().count.load(Acquire) <= 1 {
219            unsafe { Some(Prc::get_mut_unchecked(this)) }
220        } else {
221            None
222        }
223    }
224
225    #[inline]
226    fn inner(&self) -> &PrcInner<T> {
227        unsafe { self.ptr.as_ref() }
228    }
229}
230
/// Heap allocation shared by all `Prc` clones: the atomic reference count
/// followed by the pooled value itself.
struct PrcInner<T: ?Sized> {
    // Number of live `Prc` handles; 0 while the item rests in the pool
    // (see `Prc::new_zero`).
    count: AtomicUsize,
    // Last field so `T: ?Sized` is allowed.
    data: T,
}
235
// SAFETY: `PrcInner` is only shared behind `Prc`, which exposes shared `&T`
// access plus an atomic counter; `T: Send + Sync` makes that cross-thread
// sharing sound.
unsafe impl<T: ?Sized + Send + Sync> Send for PrcInner<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for PrcInner<T> {}