opool/
thread_local.rs

use crate::PoolAllocator;
use alloc::{collections::VecDeque, fmt, rc::Rc};
use core::{
    cell::UnsafeCell,
    hash::{Hash, Hasher},
    marker::PhantomData,
    mem::{forget, MaybeUninit},
    ops::{Deref, DerefMut},
    ptr,
};

/// A struct representing a thread-local object pool; it cannot be moved
/// between threads.
///
/// This struct uses an allocator to create and manage objects, and stores them
/// in an internal deque.
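///
/// A minimal usage sketch, assuming a hypothetical `BufAllocator` that
/// implements [`PoolAllocator`] for `Vec<u8>`; the allocator and the import
/// path are illustrative, not part of this module:
///
/// ```ignore
/// use opool::{LocalPool, PoolAllocator}; // adjust paths to the crate's public exports
///
/// struct BufAllocator;
///
/// impl PoolAllocator<Vec<u8>> for BufAllocator {
///     fn allocate(&self) -> Vec<u8> {
///         Vec::with_capacity(1024)
///     }
///
///     // Called when an object is handed out again via `get`/`get_rc`.
///     fn reset(&self, obj: &mut Vec<u8>) {
///         obj.clear();
///     }
///
///     // Called on drop; only valid objects are kept in the pool.
///     fn is_valid(&self, obj: &Vec<u8>) -> bool {
///         obj.capacity() <= 64 * 1024
///     }
/// }
///
/// let pool = LocalPool::new(8, BufAllocator);
/// {
///     let mut buf = pool.get();
///     buf.extend_from_slice(b"hello");
/// } // `buf` is returned to the pool when the guard is dropped
/// ```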
#[derive(Debug)]
pub struct LocalPool<P: PoolAllocator<T>, T> {
    allocator: P,
    storage: UnsafeCell<VecDeque<T>>,
    // force the struct to be !Send
    _phantom: PhantomData<*mut usize>,
}

impl<P: PoolAllocator<T>, T> LocalPool<P, T> {
    /// Creates a new [`LocalPool`] with a given size and allocator.
    ///
    /// This method immediately fills the pool with new objects created by the
    /// allocator.
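    ///
    /// A brief sketch, reusing the hypothetical `BufAllocator` from the
    /// [`LocalPool`] example:
    ///
    /// ```ignore
    /// // Eagerly allocates 8 objects up front.
    /// let pool = LocalPool::new_prefilled(8, BufAllocator);
    /// let buf = pool.get(); // served from the pre-filled storage
    /// ```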
    pub fn new_prefilled(pool_size: usize, allocator: P) -> Self {
        let mut storage = VecDeque::with_capacity(pool_size);
        for _ in 0..pool_size {
            storage.push_back(allocator.allocate());
        }
        LocalPool {
            allocator,
            storage: UnsafeCell::new(storage),
            _phantom: PhantomData,
        }
    }

    /// Creates a new [`LocalPool`] with a given size and allocator.
    ///
    /// Unlike [`Self::new_prefilled`], this method does not immediately fill
    /// the pool with objects.
    pub fn new(pool_size: usize, allocator: P) -> Self {
        LocalPool {
            allocator,
            storage: UnsafeCell::new(VecDeque::with_capacity(pool_size)),
            _phantom: PhantomData,
        }
    }

    /// Gets a mutable reference to the internal storage.
    /// Safety: this is safe to call only if the pool is used by a single thread.
    #[allow(clippy::mut_from_ref)]
    fn get_storage(&self) -> &mut VecDeque<T> {
        unsafe { &mut *self.storage.get() }
    }

    /// Wraps the pool in a reference counter, enabling the use of
    /// [`Self::get_rc`] to obtain pool-allocated objects that rely on
    /// reference-counted references instead of borrowed references.
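    ///
    /// A brief sketch, reusing the hypothetical `BufAllocator` from the
    /// [`LocalPool`] example:
    ///
    /// ```ignore
    /// let pool = LocalPool::new(8, BufAllocator).to_rc();
    /// let buf = pool.clone().get_rc(); // the guard keeps its own `Rc` to the pool
    /// ```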
    pub fn to_rc(self) -> Rc<Self> {
        Rc::new(self)
    }

    /// Gets an object from the pool.
    ///
    /// If the pool is empty, a new object is created using the allocator.
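    ///
    /// A brief sketch, reusing the hypothetical `BufAllocator` from the
    /// [`LocalPool`] example:
    ///
    /// ```ignore
    /// let pool = LocalPool::new(8, BufAllocator);
    /// let mut buf = pool.get();       // the guard borrows the pool
    /// buf.extend_from_slice(b"data");
    /// drop(buf);                      // the object goes back into the pool
    /// ```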
    pub fn get(&self) -> RefLocalGuard<P, T> {
        match self.get_storage().pop_front() {
            Some(mut obj) => {
                self.allocator.reset(&mut obj);
                RefLocalGuard::new(obj, self)
            }
            None => RefLocalGuard::new(self.allocator.allocate(), self),
        }
    }

    /// Gets an object from the pool whose guard holds an `Rc` reference to the
    /// owning pool. Objects obtained this way are not as efficient as those
    /// obtained via [`Self::get`], but they are easier to move around because
    /// they are not tied to the pool's borrow lifetime.
    ///
    /// If the pool is empty, a new object is created using the allocator.
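    ///
    /// A brief sketch, reusing the hypothetical `BufAllocator` from the
    /// [`LocalPool`] example; the guards can be stored without keeping a
    /// borrow of the pool alive:
    ///
    /// ```ignore
    /// let pool = LocalPool::new(8, BufAllocator).to_rc();
    /// let mut held = Vec::new();
    /// held.push(pool.clone().get_rc());
    /// held.push(pool.clone().get_rc());
    /// drop(held); // both objects return to the pool
    /// ```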
    pub fn get_rc(self: Rc<Self>) -> RcLocalGuard<P, T> {
        match self.get_storage().pop_front() {
            Some(mut obj) => {
                self.allocator.reset(&mut obj);
                RcLocalGuard::new(obj, &self)
            }
            None => RcLocalGuard::new(self.allocator.allocate(), &self),
        }
    }
}

/// A struct representing a guard over an object in the pool, holding a
/// borrowed reference to the owning pool.
///
/// This struct ensures that the object is returned to the pool when it is
/// dropped.
pub struct RefLocalGuard<'a, P: PoolAllocator<T>, T> {
    obj: MaybeUninit<T>,
    pool: &'a LocalPool<P, T>,
}

impl<'a, P: PoolAllocator<T>, T> RefLocalGuard<'a, P, T> {
    /// Creates a new guard for an object and a reference to the pool it
    /// belongs to.
    fn new(obj: T, pool: &'a LocalPool<P, T>) -> Self {
        RefLocalGuard {
            obj: MaybeUninit::new(obj),
            pool,
        }
    }

    /// Consumes the guard and returns the object, without returning it to the
    /// pool.
    ///
    /// This method should be used with caution, as it leads to objects not
    /// being returned to the pool.
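    ///
    /// A brief sketch, reusing the hypothetical `BufAllocator` from the
    /// [`LocalPool`] example:
    ///
    /// ```ignore
    /// let pool = LocalPool::new(8, BufAllocator);
    /// let buf: Vec<u8> = pool.get().into_inner(); // owned; never returns to the pool
    /// ```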
    pub fn into_inner(self) -> T {
        let obj = unsafe { self.obj.as_ptr().read() };
        forget(self);
        obj
    }
}

impl<'a, P: PoolAllocator<T>, T> Deref for RefLocalGuard<'a, P, T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        unsafe { &*self.obj.as_ptr() }
    }
}

impl<'a, P: PoolAllocator<T>, T> DerefMut for RefLocalGuard<'a, P, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *self.obj.as_mut_ptr() }
    }
}

/// Implementation of the Drop trait for [`RefLocalGuard`].
///
/// This ensures that the object is returned to the pool when the guard is
/// dropped, unless the object fails validation.
impl<'a, P: PoolAllocator<T>, T> Drop for RefLocalGuard<'a, P, T> {
    fn drop(&mut self) {
        let storage = self.pool.get_storage();
        if self.pool.allocator.is_valid(self.deref()) && storage.len() < storage.capacity() {
            // Safety: the object is initialized and is read out exactly once to
            // move it back into the pool; `self.obj` is not touched afterwards.
            storage.push_back(unsafe { ptr::read(self.obj.as_mut_ptr()) });
        } else {
            // Safety: the object was not moved back to the pool, so it is safe
            // to drop it in place.
            unsafe {
                ptr::drop_in_place(self.obj.as_mut_ptr());
            }
        }
    }
}

impl<'a, P: PoolAllocator<T>, T: Hash> Hash for RefLocalGuard<'a, P, T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}
impl<'a, P: PoolAllocator<T>, T: fmt::Display> fmt::Display for RefLocalGuard<'a, P, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
impl<'a, P: PoolAllocator<T>, T: fmt::Debug> fmt::Debug for RefLocalGuard<'a, P, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
impl<'a, P: PoolAllocator<T>, T> fmt::Pointer for RefLocalGuard<'a, P, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&**self as *const T), f)
    }
}
impl<'a, P: PoolAllocator<T>, T: PartialEq> PartialEq for RefLocalGuard<'a, P, T> {
    #[inline]
    fn eq(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
        self.deref().eq(other)
    }
}
impl<'a, P: PoolAllocator<T>, T: Eq> Eq for RefLocalGuard<'a, P, T> {}
impl<'a, P: PoolAllocator<T>, T: PartialOrd> PartialOrd for RefLocalGuard<'a, P, T> {
    #[inline]
    fn partial_cmp(&self, other: &RefLocalGuard<'a, P, T>) -> Option<core::cmp::Ordering> {
        (**self).partial_cmp(&**other)
    }
    #[inline]
    fn lt(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
        **self < **other
    }
    #[inline]
    fn le(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
        **self <= **other
    }
    #[inline]
    fn gt(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
        **self > **other
    }
    #[inline]
    fn ge(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
        **self >= **other
    }
}
impl<'a, P: PoolAllocator<T>, T: Ord> Ord for RefLocalGuard<'a, P, T> {
    #[inline]
    fn cmp(&self, other: &RefLocalGuard<'a, P, T>) -> core::cmp::Ordering {
        (**self).cmp(&**other)
    }
}
impl<'a, P: PoolAllocator<T>, T> core::borrow::Borrow<T> for RefLocalGuard<'a, P, T> {
    #[inline(always)]
    fn borrow(&self) -> &T {
        self
    }
}
impl<'a, P: PoolAllocator<T>, T> AsRef<T> for RefLocalGuard<'a, P, T> {
    #[inline(always)]
    fn as_ref(&self) -> &T {
        self
    }
}

/// A struct representing a guard over an object in the pool, holding an `Rc`
/// reference to the owning pool.
///
/// This struct ensures that the object is returned to the pool when it is
/// dropped.
pub struct RcLocalGuard<P: PoolAllocator<T>, T> {
    obj: MaybeUninit<T>,
    pool: Rc<LocalPool<P, T>>,
}

impl<P: PoolAllocator<T>, T> RcLocalGuard<P, T> {
    /// Creates a new guard for an object and a reference to the pool it
    /// belongs to.
    fn new(obj: T, pool: &Rc<LocalPool<P, T>>) -> Self {
        Self {
            obj: MaybeUninit::new(obj),
            pool: pool.clone(),
        }
    }

    /// Consumes the guard and returns the object, without returning it to the
    /// pool.
    ///
    /// This method should be used with caution, as it leads to objects not
    /// being returned to the pool.
    pub fn into_inner(mut self) -> T {
        let obj = unsafe { self.obj.as_ptr().read() };
        // Manually drop the `Rc` reference to the pool, since `forget(self)`
        // below skips the guard's destructor.
        unsafe { ptr::drop_in_place(&mut self.pool) }
        forget(self);
        obj
    }
}

impl<P: PoolAllocator<T>, T> Deref for RcLocalGuard<P, T> {
    type Target = T;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        unsafe { &*self.obj.as_ptr() }
    }
}

impl<P: PoolAllocator<T>, T> DerefMut for RcLocalGuard<P, T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *self.obj.as_mut_ptr() }
    }
}

/// Implementation of the Drop trait for [`RcLocalGuard`].
///
/// This ensures that the object is returned to the pool when the guard is
/// dropped, unless the object fails validation.
impl<P: PoolAllocator<T>, T> Drop for RcLocalGuard<P, T> {
    fn drop(&mut self) {
        let storage = self.pool.get_storage();
        if self.pool.allocator.is_valid(self.deref()) && storage.len() < storage.capacity() {
            // Safety: the object is initialized and is read out exactly once to
            // move it back into the pool; `self.obj` is not touched afterwards.
            storage.push_back(unsafe { ptr::read(self.obj.as_mut_ptr()) });
        } else {
            // Safety: the object was not moved back to the pool, so it is safe
            // to drop it in place.
            unsafe {
                ptr::drop_in_place(self.obj.as_mut_ptr());
            }
        }
    }
}

impl<P: PoolAllocator<T>, T: Hash> Hash for RcLocalGuard<P, T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        (**self).hash(state);
    }
}
impl<P: PoolAllocator<T>, T: fmt::Display> fmt::Display for RcLocalGuard<P, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&**self, f)
    }
}
impl<P: PoolAllocator<T>, T: fmt::Debug> fmt::Debug for RcLocalGuard<P, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
impl<P: PoolAllocator<T>, T> fmt::Pointer for RcLocalGuard<P, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Pointer::fmt(&(&**self as *const T), f)
    }
}
impl<P: PoolAllocator<T>, T: PartialEq> PartialEq for RcLocalGuard<P, T> {
    #[inline]
    fn eq(&self, other: &RcLocalGuard<P, T>) -> bool {
        self.deref().eq(other)
    }
}
impl<P: PoolAllocator<T>, T: Eq> Eq for RcLocalGuard<P, T> {}
impl<P: PoolAllocator<T>, T: PartialOrd> PartialOrd for RcLocalGuard<P, T> {
    #[inline]
    fn partial_cmp(&self, other: &RcLocalGuard<P, T>) -> Option<core::cmp::Ordering> {
        (**self).partial_cmp(&**other)
    }
    #[inline]
    fn lt(&self, other: &RcLocalGuard<P, T>) -> bool {
        **self < **other
    }
    #[inline]
    fn le(&self, other: &RcLocalGuard<P, T>) -> bool {
        **self <= **other
    }
    #[inline]
    fn gt(&self, other: &RcLocalGuard<P, T>) -> bool {
        **self > **other
    }
    #[inline]
    fn ge(&self, other: &RcLocalGuard<P, T>) -> bool {
        **self >= **other
    }
}
impl<P: PoolAllocator<T>, T: Ord> Ord for RcLocalGuard<P, T> {
    #[inline]
    fn cmp(&self, other: &RcLocalGuard<P, T>) -> core::cmp::Ordering {
        (**self).cmp(&**other)
    }
}
impl<P: PoolAllocator<T>, T> core::borrow::Borrow<T> for RcLocalGuard<P, T> {
    #[inline(always)]
    fn borrow(&self) -> &T {
        self
    }
}
impl<P: PoolAllocator<T>, T> AsRef<T> for RcLocalGuard<P, T> {
    #[inline(always)]
    fn as_ref(&self) -> &T {
        self
    }
}