opool/
thread_local.rs

1use alloc::{collections::VecDeque, fmt, rc::Rc};
2use core::{
3    cell::UnsafeCell,
4    hash::{Hash, Hasher},
5    marker::PhantomData,
6    mem::{forget, MaybeUninit},
7    ops::{Deref, DerefMut},
8    ptr,
9};
10
11use crate::PoolAllocator;
12
/// An object pool bound to a single thread; it cannot be moved or shared
/// between threads.
///
/// This struct uses an allocator to create and manage objects, and stores the
/// recycled ones in an internal queue.
#[derive(Debug)]
pub struct LocalPool<P: PoolAllocator<T>, T> {
    // Creates new objects, resets recycled ones and validates returned ones.
    allocator: P,
    // Recycled objects ready to be handed out again; interior mutability lets
    // `&self` methods pop/push (sound only because the pool is !Send).
    storage: UnsafeCell<VecDeque<T>>,
    // force the struct to be !Send
    _phantom: PhantomData<*mut usize>,
}
25
26impl<P: PoolAllocator<T>, T> LocalPool<P, T> {
27    /// Creates a new LocalPool with a given size and allocator.
28    ///
29    /// This method immediately fills the pool with new objects created by the
30    /// allocator.
31    pub fn new_prefilled(pool_size: usize, allocator: P) -> Self {
32        let mut storage = VecDeque::with_capacity(pool_size);
33        for _ in 0..pool_size {
34            storage.push_back(allocator.allocate());
35        }
36        LocalPool {
37            allocator,
38            storage: UnsafeCell::new(storage),
39            _phantom: PhantomData,
40        }
41    }
42
43    /// Creates a new Object Pool with a given size and allocator.
44    ///
45    /// Unlike [`Self::new_prefilled`], this method does not immediately fill
46    /// the pool with objects.
47    pub fn new(pool_size: usize, allocator: P) -> Self {
48        LocalPool {
49            allocator,
50            storage: UnsafeCell::new(VecDeque::with_capacity(pool_size)),
51            _phantom: PhantomData,
52        }
53    }
54
55    /// Get storage as mutable reference
56    /// Safety: it's safe to call only if the pool is used by a single threaded.
57    #[allow(clippy::mut_from_ref)]
58    fn storage_mut(&self) -> &mut VecDeque<T> {
59        unsafe { &mut *self.storage.get() }
60    }
61
62    /// Borrows storage as immutable reference
63    /// Safety: it's safe to call only if the pool is used by a single threaded.
64    #[allow(clippy::mut_from_ref)]
65    fn storage_borrow(&self) -> &VecDeque<T> {
66        unsafe { &*self.storage.get() }
67    }
68
69    /// Wraps the pool allocator with an reference counter, enabling the
70    /// use of [`Self::get_rc`] to obtain pool-allocated objects that rely on
71    /// reference counted references instead of borrowed references.
72    pub fn to_rc(self) -> Rc<Self> {
73        Rc::new(self)
74    }
75
76    /// Attempts to get an object from the pool.
77    ///
78    /// If the pool is empty, None is returned.
79    pub fn try_get(&self) -> Option<RefLocalGuard<'_, P, T>> {
80        self.storage_mut().pop_front().map(|mut obj| {
81            self.allocator.reset(&mut obj);
82            RefLocalGuard::new(obj, self)
83        })
84    }
85
86    /// Gets an object from the pool.
87    ///
88    /// If the pool is empty, a new object is created using the allocator.
89    pub fn get(&'_ self) -> RefLocalGuard<'_, P, T> {
90        match self.storage_mut().pop_front() {
91            Some(mut obj) => {
92                self.allocator.reset(&mut obj);
93                RefLocalGuard::new(obj, self)
94            }
95            None => RefLocalGuard::new(self.allocator.allocate(), self),
96        }
97    }
98
99    /// Attempts to get an object from the pool that holds an rc reference to
100    /// the owning pool. Allocated objects are not as efficient as those
101    /// allocated by [`Self::get`] method but they are easier to move as
102    /// they are not limited by allocator lifetime directly.
103    ///
104    /// If the pool is empty, None is returned.
105    pub fn try_get_rc(self: Rc<Self>) -> Option<RcLocalGuard<P, T>> {
106        self.storage_mut().pop_front().map(|mut obj| {
107            self.allocator.reset(&mut obj);
108            RcLocalGuard::new(obj, &self)
109        })
110    }
111
112    /// Gets an object from the pool that holds an rc reference to the owning
113    /// pool. Allocated objects are not as efficient as those allocated by
114    /// [`Self::get`] method but they are easier to move as they are not limited
115    /// by allocator lifetime directly.
116    ///
117    /// If the pool is empty, a new object is created using the allocator.
118    pub fn get_rc(self: Rc<Self>) -> RcLocalGuard<P, T> {
119        match self.storage_mut().pop_front() {
120            Some(mut obj) => {
121                self.allocator.reset(&mut obj);
122                RcLocalGuard::new(obj, &self)
123            }
124            None => RcLocalGuard::new(self.allocator.allocate(), &self),
125        }
126    }
127
128    /// Gets the number of objects currently in the pool.
129    ///
130    /// Returns the length of the internal storage, indicating the number of
131    /// objects that are ready to be recycled from the pool.
132    pub fn len(&self) -> usize {
133        self.storage_borrow().len()
134    }
135
136    /// Checks if the pool is empty.
137    ///
138    /// Returns `true` if there are no objects currently in the pool that are
139    /// ready to be recycled.
140    pub fn is_empty(&self) -> bool {
141        self.storage_borrow().is_empty()
142    }
143
144    /// Gets the capacity of the pool.
145    ///
146    /// Returns the maximum number of objects that the pool can hold. This does
147    /// not indicate the maximum number of objects that can be allocated,
148    /// but maximum objects that can be stored and recycled from the pool.
149    pub fn capacity(&self) -> usize {
150        self.storage_borrow().capacity()
151    }
152}
153
/// A guard over an object borrowed from a [`LocalPool`].
///
/// Dereferences to the pooled object and hands it back to the pool when it is
/// dropped.
pub struct RefLocalGuard<'a, P: PoolAllocator<T>, T> {
    // Always initialized except transiently during drop/into_inner; wrapped in
    // MaybeUninit so the value can be moved out without causing a double drop.
    obj: MaybeUninit<T>,
    // Borrowed owner; the guard cannot outlive the pool.
    pool: &'a LocalPool<P, T>,
}
162
163impl<'a, P: PoolAllocator<T>, T> RefLocalGuard<'a, P, T> {
164    /// Creates a new Guard for an object and a reference to the pool it
165    /// belongs to.
166    fn new(obj: T, pool: &'a LocalPool<P, T>) -> Self {
167        RefLocalGuard {
168            obj: MaybeUninit::new(obj),
169            pool,
170        }
171    }
172
173    /// Consumes the guard and returns the object, without returning it to the
174    /// pool.
175    ///
176    /// This method should be used with caution, as it leads to objects not
177    /// being returned to the pool.
178    pub fn into_inner(self) -> T {
179        let obj = unsafe { self.obj.as_ptr().read() };
180        forget(self);
181        obj
182    }
183}
184
185impl<'a, P: PoolAllocator<T>, T> Deref for RefLocalGuard<'a, P, T> {
186    type Target = T;
187
188    fn deref(&self) -> &Self::Target {
189        unsafe { &*self.obj.as_ptr() }
190    }
191}
192
193impl<'a, P: PoolAllocator<T>, T> DerefMut for RefLocalGuard<'a, P, T> {
194    fn deref_mut(&mut self) -> &mut Self::Target {
195        unsafe { &mut *self.obj.as_mut_ptr() }
196    }
197}
198
/// Implementation of the Drop trait for Guard.
///
/// This ensures that the object is returned to the pool when the guard is
/// dropped, unless the object fails validation or the pool is already at
/// capacity — in either of those cases the object is dropped in place.
impl<'a, P: PoolAllocator<T>, T> Drop for RefLocalGuard<'a, P, T> {
    fn drop(&mut self) {
        let storage = self.pool.storage_mut();
        // Recycle only objects the allocator still considers valid, and never
        // grow the storage beyond its preallocated capacity.
        if self.pool.allocator.is_valid(self.deref()) && storage.len() < storage.capacity() {
            // Safety: object is not moved and valid for this single move to the pool.
            storage.push_back(unsafe { ptr::read(self.obj.as_mut_ptr()) });
        } else {
            // Safety: object is not moved back to the pool it is safe to drop it in place.
            unsafe {
                ptr::drop_in_place(self.obj.as_mut_ptr());
            }
        }
    }
}
217
218impl<'a, P: PoolAllocator<T>, T: Hash> Hash for RefLocalGuard<'a, P, T> {
219    #[inline]
220    fn hash<H: Hasher>(&self, state: &mut H) {
221        (**self).hash(state);
222    }
223}
224impl<'a, P: PoolAllocator<T>, T: fmt::Display> fmt::Display for RefLocalGuard<'a, P, T> {
225    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
226        fmt::Display::fmt(&**self, f)
227    }
228}
229impl<'a, P: PoolAllocator<T>, T: fmt::Debug> fmt::Debug for RefLocalGuard<'a, P, T> {
230    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
231        fmt::Debug::fmt(&**self, f)
232    }
233}
234impl<'a, P: PoolAllocator<T>, T> fmt::Pointer for RefLocalGuard<'a, P, T> {
235    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
236        fmt::Pointer::fmt(&(&**self as *const T), f)
237    }
238}
239impl<'a, P: PoolAllocator<T>, T: PartialEq> PartialEq for RefLocalGuard<'a, P, T> {
240    #[inline]
241    fn eq(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
242        self.deref().eq(other)
243    }
244}
245impl<'a, P: PoolAllocator<T>, T: Eq> Eq for RefLocalGuard<'a, P, T> {}
246impl<'a, P: PoolAllocator<T>, T: PartialOrd> PartialOrd for RefLocalGuard<'a, P, T> {
247    #[inline]
248    fn partial_cmp(&self, other: &RefLocalGuard<'a, P, T>) -> Option<core::cmp::Ordering> {
249        (**self).partial_cmp(&**other)
250    }
251    #[inline]
252    fn lt(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
253        **self < **other
254    }
255    #[inline]
256    fn le(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
257        **self <= **other
258    }
259    #[inline]
260    fn gt(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
261        **self > **other
262    }
263    #[inline]
264    fn ge(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
265        **self >= **other
266    }
267}
268impl<'a, P: PoolAllocator<T>, T: Ord> Ord for RefLocalGuard<'a, P, T> {
269    #[inline]
270    fn cmp(&self, other: &RefLocalGuard<'a, P, T>) -> core::cmp::Ordering {
271        (**self).cmp(&**other)
272    }
273}
274impl<'a, P: PoolAllocator<T>, T> core::borrow::Borrow<T> for RefLocalGuard<'a, P, T> {
275    #[inline(always)]
276    fn borrow(&self) -> &T {
277        self
278    }
279}
280impl<'a, P: PoolAllocator<T>, T> AsRef<T> for RefLocalGuard<'a, P, T> {
281    #[inline(always)]
282    fn as_ref(&self) -> &T {
283        self
284    }
285}
286
/// A guard over an object taken from a [`LocalPool`], keeping the pool alive
/// through an `Rc` handle.
///
/// Dereferences to the pooled object and hands it back to the pool when it is
/// dropped.
pub struct RcLocalGuard<P: PoolAllocator<T>, T> {
    // Always initialized except transiently during drop/into_inner; wrapped in
    // MaybeUninit so the value can be moved out without causing a double drop.
    obj: MaybeUninit<T>,
    // Owning handle; keeps the pool alive for the guard's lifetime.
    pool: Rc<LocalPool<P, T>>,
}
295
296impl<P: PoolAllocator<T>, T> RcLocalGuard<P, T> {
297    /// Creates a new Guard for an object and a reference to the pool it
298    /// belongs to.
299    fn new(obj: T, pool: &Rc<LocalPool<P, T>>) -> Self {
300        Self {
301            obj: MaybeUninit::new(obj),
302            pool: pool.clone(),
303        }
304    }
305
306    /// Consumes the guard and returns the object, without returning it to the
307    /// pool.
308    ///
309    /// This method should be used with caution, as it leads to objects not
310    /// being returned to the pool.
311    pub fn into_inner(mut self) -> T {
312        let obj = unsafe { self.obj.as_ptr().read() };
313        // Drop the arc reference
314        unsafe { ptr::drop_in_place(&mut self.pool) }
315        forget(self);
316        obj
317    }
318}
319
320impl<P: PoolAllocator<T>, T> Deref for RcLocalGuard<P, T> {
321    type Target = T;
322    #[inline(always)]
323    fn deref(&self) -> &Self::Target {
324        unsafe { &*self.obj.as_ptr() }
325    }
326}
327
328impl<P: PoolAllocator<T>, T> DerefMut for RcLocalGuard<P, T> {
329    #[inline(always)]
330    fn deref_mut(&mut self) -> &mut Self::Target {
331        unsafe { &mut *self.obj.as_mut_ptr() }
332    }
333}
334
/// Implementation of the Drop trait for Guard.
///
/// This ensures that the object is returned to the pool when the guard is
/// dropped, unless the object fails validation or the pool is already at
/// capacity — in either of those cases the object is dropped in place.
impl<P: PoolAllocator<T>, T> Drop for RcLocalGuard<P, T> {
    fn drop(&mut self) {
        let storage = self.pool.storage_mut();
        // Recycle only objects the allocator still considers valid, and never
        // grow the storage beyond its preallocated capacity.
        if self.pool.allocator.is_valid(self.deref()) && storage.len() < storage.capacity() {
            // Safety: object is not moved and valid for this single move to the pool.
            storage.push_back(unsafe { ptr::read(self.obj.as_mut_ptr()) });
        } else {
            // Safety: object is not moved back to the pool it is safe to drop it in place.
            unsafe {
                ptr::drop_in_place(self.obj.as_mut_ptr());
            }
        }
    }
}
353
354impl<P: PoolAllocator<T>, T: Hash> Hash for RcLocalGuard<P, T> {
355    #[inline]
356    fn hash<H: Hasher>(&self, state: &mut H) {
357        (**self).hash(state);
358    }
359}
360impl<P: PoolAllocator<T>, T: fmt::Display> fmt::Display for RcLocalGuard<P, T> {
361    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
362        fmt::Display::fmt(&**self, f)
363    }
364}
365impl<P: PoolAllocator<T>, T: fmt::Debug> fmt::Debug for RcLocalGuard<P, T> {
366    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
367        fmt::Debug::fmt(&**self, f)
368    }
369}
370impl<P: PoolAllocator<T>, T> fmt::Pointer for RcLocalGuard<P, T> {
371    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
372        fmt::Pointer::fmt(&(&**self as *const T), f)
373    }
374}
375impl<P: PoolAllocator<T>, T: PartialEq> PartialEq for RcLocalGuard<P, T> {
376    #[inline]
377    fn eq(&self, other: &RcLocalGuard<P, T>) -> bool {
378        self.deref().eq(other)
379    }
380}
381impl<P: PoolAllocator<T>, T: Eq> Eq for RcLocalGuard<P, T> {}
382impl<P: PoolAllocator<T>, T: PartialOrd> PartialOrd for RcLocalGuard<P, T> {
383    #[inline]
384    fn partial_cmp(&self, other: &RcLocalGuard<P, T>) -> Option<core::cmp::Ordering> {
385        (**self).partial_cmp(&**other)
386    }
387    #[inline]
388    fn lt(&self, other: &RcLocalGuard<P, T>) -> bool {
389        **self < **other
390    }
391    #[inline]
392    fn le(&self, other: &RcLocalGuard<P, T>) -> bool {
393        **self <= **other
394    }
395    #[inline]
396    fn gt(&self, other: &RcLocalGuard<P, T>) -> bool {
397        **self > **other
398    }
399    #[inline]
400    fn ge(&self, other: &RcLocalGuard<P, T>) -> bool {
401        **self >= **other
402    }
403}
404impl<P: PoolAllocator<T>, T: Ord> Ord for RcLocalGuard<P, T> {
405    #[inline]
406    fn cmp(&self, other: &RcLocalGuard<P, T>) -> core::cmp::Ordering {
407        (**self).cmp(&**other)
408    }
409}
410impl<P: PoolAllocator<T>, T> core::borrow::Borrow<T> for RcLocalGuard<P, T> {
411    #[inline(always)]
412    fn borrow(&self) -> &T {
413        self
414    }
415}
416impl<P: PoolAllocator<T>, T> AsRef<T> for RcLocalGuard<P, T> {
417    #[inline(always)]
418    fn as_ref(&self) -> &T {
419        self
420    }
421}