1use alloc::{collections::VecDeque, fmt, rc::Rc};
2use core::{
3 cell::UnsafeCell,
4 hash::{Hash, Hasher},
5 marker::PhantomData,
6 mem::{forget, MaybeUninit},
7 ops::{Deref, DerefMut},
8 ptr,
9};
10
11use crate::PoolAllocator;
12
/// A single-threaded object pool.
///
/// Idle objects are kept in a FIFO `VecDeque` behind an `UnsafeCell`; the
/// interior mutability is sound only because the raw-pointer `PhantomData`
/// marker makes this type neither `Send` nor `Sync`.
#[derive(Debug)]
pub struct LocalPool<P: PoolAllocator<T>, T> {
    // Creates, resets and validates pooled objects.
    allocator: P,
    // Recycled objects waiting to be handed out (pop_front / push_back).
    storage: UnsafeCell<VecDeque<T>>,
    // `*mut usize` is `!Send + !Sync`, opting the pool out of both.
    _phantom: PhantomData<*mut usize>,
}
25
26impl<P: PoolAllocator<T>, T> LocalPool<P, T> {
27 pub fn new_prefilled(pool_size: usize, allocator: P) -> Self {
32 let mut storage = VecDeque::with_capacity(pool_size);
33 for _ in 0..pool_size {
34 storage.push_back(allocator.allocate());
35 }
36 LocalPool {
37 allocator,
38 storage: UnsafeCell::new(storage),
39 _phantom: PhantomData,
40 }
41 }
42
43 pub fn new(pool_size: usize, allocator: P) -> Self {
48 LocalPool {
49 allocator,
50 storage: UnsafeCell::new(VecDeque::with_capacity(pool_size)),
51 _phantom: PhantomData,
52 }
53 }
54
55 #[allow(clippy::mut_from_ref)]
58 fn storage_mut(&self) -> &mut VecDeque<T> {
59 unsafe { &mut *self.storage.get() }
60 }
61
62 #[allow(clippy::mut_from_ref)]
65 fn storage_borrow(&self) -> &VecDeque<T> {
66 unsafe { &*self.storage.get() }
67 }
68
69 pub fn to_rc(self) -> Rc<Self> {
73 Rc::new(self)
74 }
75
76 pub fn try_get(&self) -> Option<RefLocalGuard<'_, P, T>> {
80 self.storage_mut().pop_front().map(|mut obj| {
81 self.allocator.reset(&mut obj);
82 RefLocalGuard::new(obj, self)
83 })
84 }
85
86 pub fn get(&'_ self) -> RefLocalGuard<'_, P, T> {
90 match self.storage_mut().pop_front() {
91 Some(mut obj) => {
92 self.allocator.reset(&mut obj);
93 RefLocalGuard::new(obj, self)
94 }
95 None => RefLocalGuard::new(self.allocator.allocate(), self),
96 }
97 }
98
99 pub fn try_get_rc(self: Rc<Self>) -> Option<RcLocalGuard<P, T>> {
106 self.storage_mut().pop_front().map(|mut obj| {
107 self.allocator.reset(&mut obj);
108 RcLocalGuard::new(obj, &self)
109 })
110 }
111
112 pub fn get_rc(self: Rc<Self>) -> RcLocalGuard<P, T> {
119 match self.storage_mut().pop_front() {
120 Some(mut obj) => {
121 self.allocator.reset(&mut obj);
122 RcLocalGuard::new(obj, &self)
123 }
124 None => RcLocalGuard::new(self.allocator.allocate(), &self),
125 }
126 }
127
128 pub fn len(&self) -> usize {
133 self.storage_borrow().len()
134 }
135
136 pub fn is_empty(&self) -> bool {
141 self.storage_borrow().is_empty()
142 }
143
144 pub fn capacity(&self) -> usize {
150 self.storage_borrow().capacity()
151 }
152}
153
/// RAII guard over a pooled object; on drop the object is returned to the
/// borrowed pool (or dropped — see the `Drop` impl).
pub struct RefLocalGuard<'a, P: PoolAllocator<T>, T> {
    // Always initialized while the guard is alive; `MaybeUninit` lets
    // `Drop`/`into_inner` move the value out without a double drop.
    obj: MaybeUninit<T>,
    // Borrow of the owning pool, used to recycle the object on drop.
    pool: &'a LocalPool<P, T>,
}
162
impl<'a, P: PoolAllocator<T>, T> RefLocalGuard<'a, P, T> {
    /// Wraps an (already reset) object together with its owning pool.
    fn new(obj: T, pool: &'a LocalPool<P, T>) -> Self {
        RefLocalGuard {
            obj: MaybeUninit::new(obj),
            pool,
        }
    }

    /// Consumes the guard and returns the object, bypassing recycling.
    pub fn into_inner(self) -> T {
        // SAFETY: `obj` is initialized; `forget(self)` below prevents the
        // `Drop` impl from reading it a second time.
        let obj = unsafe { self.obj.as_ptr().read() };
        forget(self);
        obj
    }
}
184
185impl<'a, P: PoolAllocator<T>, T> Deref for RefLocalGuard<'a, P, T> {
186 type Target = T;
187
188 fn deref(&self) -> &Self::Target {
189 unsafe { &*self.obj.as_ptr() }
190 }
191}
192
193impl<'a, P: PoolAllocator<T>, T> DerefMut for RefLocalGuard<'a, P, T> {
194 fn deref_mut(&mut self) -> &mut Self::Target {
195 unsafe { &mut *self.obj.as_mut_ptr() }
196 }
197}
198
impl<'a, P: PoolAllocator<T>, T> Drop for RefLocalGuard<'a, P, T> {
    fn drop(&mut self) {
        let storage = self.pool.storage_mut();
        // Recycle only objects the allocator still considers valid, and only
        // while the queue has spare capacity (never reallocates on drop).
        if self.pool.allocator.is_valid(self.deref()) && storage.len() < storage.capacity() {
            // SAFETY: `obj` is initialized; ownership moves into the queue
            // and `obj` is never read again (the guard is being destroyed).
            storage.push_back(unsafe { ptr::read(self.obj.as_mut_ptr()) });
        } else {
            // SAFETY: `obj` is initialized and was not moved out on this path.
            unsafe {
                ptr::drop_in_place(self.obj.as_mut_ptr());
            }
        }
    }
}
217
218impl<'a, P: PoolAllocator<T>, T: Hash> Hash for RefLocalGuard<'a, P, T> {
219 #[inline]
220 fn hash<H: Hasher>(&self, state: &mut H) {
221 (**self).hash(state);
222 }
223}
224impl<'a, P: PoolAllocator<T>, T: fmt::Display> fmt::Display for RefLocalGuard<'a, P, T> {
225 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
226 fmt::Display::fmt(&**self, f)
227 }
228}
229impl<'a, P: PoolAllocator<T>, T: fmt::Debug> fmt::Debug for RefLocalGuard<'a, P, T> {
230 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
231 fmt::Debug::fmt(&**self, f)
232 }
233}
234impl<'a, P: PoolAllocator<T>, T> fmt::Pointer for RefLocalGuard<'a, P, T> {
235 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
236 fmt::Pointer::fmt(&(&**self as *const T), f)
237 }
238}
239impl<'a, P: PoolAllocator<T>, T: PartialEq> PartialEq for RefLocalGuard<'a, P, T> {
240 #[inline]
241 fn eq(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
242 self.deref().eq(other)
243 }
244}
// `Eq` holds because equality is delegated verbatim to `T: Eq`.
impl<'a, P: PoolAllocator<T>, T: Eq> Eq for RefLocalGuard<'a, P, T> {}
246impl<'a, P: PoolAllocator<T>, T: PartialOrd> PartialOrd for RefLocalGuard<'a, P, T> {
247 #[inline]
248 fn partial_cmp(&self, other: &RefLocalGuard<'a, P, T>) -> Option<core::cmp::Ordering> {
249 (**self).partial_cmp(&**other)
250 }
251 #[inline]
252 fn lt(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
253 **self < **other
254 }
255 #[inline]
256 fn le(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
257 **self <= **other
258 }
259 #[inline]
260 fn gt(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
261 **self > **other
262 }
263 #[inline]
264 fn ge(&self, other: &RefLocalGuard<'a, P, T>) -> bool {
265 **self >= **other
266 }
267}
268impl<'a, P: PoolAllocator<T>, T: Ord> Ord for RefLocalGuard<'a, P, T> {
269 #[inline]
270 fn cmp(&self, other: &RefLocalGuard<'a, P, T>) -> core::cmp::Ordering {
271 (**self).cmp(&**other)
272 }
273}
274impl<'a, P: PoolAllocator<T>, T> core::borrow::Borrow<T> for RefLocalGuard<'a, P, T> {
275 #[inline(always)]
276 fn borrow(&self) -> &T {
277 self
278 }
279}
280impl<'a, P: PoolAllocator<T>, T> AsRef<T> for RefLocalGuard<'a, P, T> {
281 #[inline(always)]
282 fn as_ref(&self) -> &T {
283 self
284 }
285}
286
/// RAII guard over a pooled object that keeps its pool alive via `Rc`,
/// so it has no lifetime parameter (unlike `RefLocalGuard`).
pub struct RcLocalGuard<P: PoolAllocator<T>, T> {
    // Always initialized while the guard is alive; `MaybeUninit` lets
    // `Drop`/`into_inner` move the value out without a double drop.
    obj: MaybeUninit<T>,
    // Shared ownership of the pool; released on drop or in `into_inner`.
    pool: Rc<LocalPool<P, T>>,
}
295
impl<P: PoolAllocator<T>, T> RcLocalGuard<P, T> {
    /// Wraps an (already reset) object, cloning the `Rc` to the pool.
    fn new(obj: T, pool: &Rc<LocalPool<P, T>>) -> Self {
        Self {
            obj: MaybeUninit::new(obj),
            pool: pool.clone(),
        }
    }

    /// Consumes the guard and returns the object, bypassing recycling.
    pub fn into_inner(mut self) -> T {
        // SAFETY: `obj` is initialized; `forget(self)` below prevents the
        // `Drop` impl from reading it a second time.
        let obj = unsafe { self.obj.as_ptr().read() };
        // SAFETY: because `self` is forgotten below its `Drop` never runs,
        // so release the `Rc` by hand here to avoid leaking the pool handle;
        // `self.pool` is never touched afterwards.
        unsafe { ptr::drop_in_place(&mut self.pool) }
        forget(self);
        obj
    }
}
319
320impl<P: PoolAllocator<T>, T> Deref for RcLocalGuard<P, T> {
321 type Target = T;
322 #[inline(always)]
323 fn deref(&self) -> &Self::Target {
324 unsafe { &*self.obj.as_ptr() }
325 }
326}
327
328impl<P: PoolAllocator<T>, T> DerefMut for RcLocalGuard<P, T> {
329 #[inline(always)]
330 fn deref_mut(&mut self) -> &mut Self::Target {
331 unsafe { &mut *self.obj.as_mut_ptr() }
332 }
333}
334
impl<P: PoolAllocator<T>, T> Drop for RcLocalGuard<P, T> {
    fn drop(&mut self) {
        let storage = self.pool.storage_mut();
        // Recycle only objects the allocator still considers valid, and only
        // while the queue has spare capacity (never reallocates on drop).
        if self.pool.allocator.is_valid(self.deref()) && storage.len() < storage.capacity() {
            // SAFETY: `obj` is initialized; ownership moves into the queue
            // and `obj` is never read again (the guard is being destroyed).
            storage.push_back(unsafe { ptr::read(self.obj.as_mut_ptr()) });
        } else {
            // SAFETY: `obj` is initialized and was not moved out on this path.
            unsafe {
                ptr::drop_in_place(self.obj.as_mut_ptr());
            }
        }
        // NOTE: `self.pool` (the `Rc`) is dropped automatically after this
        // body runs, releasing the pool handle.
    }
}
353
354impl<P: PoolAllocator<T>, T: Hash> Hash for RcLocalGuard<P, T> {
355 #[inline]
356 fn hash<H: Hasher>(&self, state: &mut H) {
357 (**self).hash(state);
358 }
359}
360impl<P: PoolAllocator<T>, T: fmt::Display> fmt::Display for RcLocalGuard<P, T> {
361 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
362 fmt::Display::fmt(&**self, f)
363 }
364}
365impl<P: PoolAllocator<T>, T: fmt::Debug> fmt::Debug for RcLocalGuard<P, T> {
366 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
367 fmt::Debug::fmt(&**self, f)
368 }
369}
370impl<P: PoolAllocator<T>, T> fmt::Pointer for RcLocalGuard<P, T> {
371 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
372 fmt::Pointer::fmt(&(&**self as *const T), f)
373 }
374}
375impl<P: PoolAllocator<T>, T: PartialEq> PartialEq for RcLocalGuard<P, T> {
376 #[inline]
377 fn eq(&self, other: &RcLocalGuard<P, T>) -> bool {
378 self.deref().eq(other)
379 }
380}
// `Eq` holds because equality is delegated verbatim to `T: Eq`.
impl<P: PoolAllocator<T>, T: Eq> Eq for RcLocalGuard<P, T> {}
382impl<P: PoolAllocator<T>, T: PartialOrd> PartialOrd for RcLocalGuard<P, T> {
383 #[inline]
384 fn partial_cmp(&self, other: &RcLocalGuard<P, T>) -> Option<core::cmp::Ordering> {
385 (**self).partial_cmp(&**other)
386 }
387 #[inline]
388 fn lt(&self, other: &RcLocalGuard<P, T>) -> bool {
389 **self < **other
390 }
391 #[inline]
392 fn le(&self, other: &RcLocalGuard<P, T>) -> bool {
393 **self <= **other
394 }
395 #[inline]
396 fn gt(&self, other: &RcLocalGuard<P, T>) -> bool {
397 **self > **other
398 }
399 #[inline]
400 fn ge(&self, other: &RcLocalGuard<P, T>) -> bool {
401 **self >= **other
402 }
403}
404impl<P: PoolAllocator<T>, T: Ord> Ord for RcLocalGuard<P, T> {
405 #[inline]
406 fn cmp(&self, other: &RcLocalGuard<P, T>) -> core::cmp::Ordering {
407 (**self).cmp(&**other)
408 }
409}
410impl<P: PoolAllocator<T>, T> core::borrow::Borrow<T> for RcLocalGuard<P, T> {
411 #[inline(always)]
412 fn borrow(&self) -> &T {
413 self
414 }
415}
416impl<P: PoolAllocator<T>, T> AsRef<T> for RcLocalGuard<P, T> {
417 #[inline(always)]
418 fn as_ref(&self) -> &T {
419 self
420 }
421}