1use std::ptr;
2use std::sync::Arc;
3use std::sync::atomic::Ordering::*;
4use std::{ops::Deref, ptr::NonNull, sync::atomic::AtomicUsize};
5
6use crate::Pool;
7
/// A borrowing handle to an item checked out of a [`Pool`].
///
/// Dereferences to the pooled `T`. Handles are reference-counted via
/// [`Prc`]; when the last clone is dropped, the item is returned to the
/// pool (see the `Drop` impl) rather than freed.
pub struct Entry<'a, T: Default> {
    // The reference-counted item. `None` only transiently: the final
    // handle's `drop` takes it out to hand back to the pool.
    pub(crate) item: Option<Prc<T>>,
    // The pool this item is recycled into on final drop.
    pub(crate) pool: &'a Pool<T>,
}
20
21impl<'a, T: Default> Clone for Entry<'a, T> {
22 fn clone(&self) -> Self {
24 Self {
25 item: self.item.clone(),
26 pool: self.pool,
27 }
28 }
29}
30
31impl<'a, T: Default> Drop for Entry<'a, T> {
32 fn drop(&mut self) {
33 if self.item.as_ref().is_some_and(|i| i.dec_ref() == 1) {
34 let item = self.item.take().unwrap();
36 self.pool.recycle(item);
37 }
38 }
39}
40
41impl<'a, T: Default> Deref for Entry<'a, T> {
42 type Target = T;
43 fn deref(&self) -> &Self::Target {
44 self.item.as_ref().unwrap()
45 }
46}
47
48impl<'a, T: Default> Entry<'a, T> {
49 pub fn get(&self) -> &T {
51 &self
52 }
53
54 pub fn get_mut(&mut self) -> Option<&mut T> {
57 Prc::get_mut(self.item.as_mut().unwrap())
58 }
59
60 pub unsafe fn get_mut_unchecked(&mut self) -> &mut T {
63 unsafe { Prc::get_mut_unchecked(self.item.as_mut().unwrap()) }
64 }
65}
66
/// An owning handle to an item checked out of an [`Pool`] behind an `Arc`.
///
/// Same semantics as [`Entry`], but holds the pool by `Arc` instead of by
/// borrow, so it carries no lifetime and can outlive the scope that created
/// it. The item is recycled into the pool when the last clone drops.
pub struct OwnedEntry<T: Default> {
    // The reference-counted item. `None` only transiently: the final
    // handle's `drop` takes it out to hand back to the pool.
    pub(crate) item: Option<Prc<T>>,
    // Shared ownership of the pool this item is recycled into.
    pub(crate) pool: Arc<Pool<T>>,
}
79
80impl<T: Default> Clone for OwnedEntry<T> {
81 fn clone(&self) -> Self {
83 Self {
84 item: self.item.clone(),
85 pool: self.pool.clone(),
86 }
87 }
88}
89
90impl<T: Default> Drop for OwnedEntry<T> {
91 fn drop(&mut self) {
92 if self.item.as_ref().is_some_and(|i| i.dec_ref() == 1) {
93 let item = self.item.take().unwrap();
95 self.pool.recycle(item);
96 }
97 }
98}
99
100impl<T: Default> Deref for OwnedEntry<T> {
101 type Target = T;
102 fn deref(&self) -> &Self::Target {
103 self.item.as_ref().unwrap()
104 }
105}
106
107impl<T: Default> OwnedEntry<T> {
108 pub fn get(&self) -> &T {
110 &self
111 }
112
113 pub fn get_mut(&mut self) -> Option<&mut T> {
116 Prc::get_mut(self.item.as_mut().unwrap())
117 }
118
119 pub unsafe fn get_mut_unchecked(&mut self) -> &mut T {
122 unsafe { Prc::get_mut_unchecked(self.item.as_mut().unwrap()) }
123 }
124}
125
/// A pool-oriented atomic reference-counted pointer, similar to a
/// stripped-down [`std::sync::Arc`]: cloning bumps a shared counter, and
/// the pool code decides when the allocation is recycled.
pub(crate) struct Prc<T: ?Sized> {
    // Pointer to the heap cell holding the counter and the data.
    // `NonNull` also gives `Option<Prc<T>>` the pointer-sized niche layout.
    ptr: NonNull<PrcInner<T>>,
}
147
// SAFETY: `Prc<T>` hands out shared `&T` across threads (via `Deref` and
// `Clone`), so it may only cross threads when `T` itself can be both sent
// and shared — the same `T: Send + Sync` bounds `std::sync::Arc` uses.
unsafe impl<T: ?Sized + Send + Sync> Send for Prc<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for Prc<T> {}
150
151impl<T: ?Sized> Deref for Prc<T> {
152 type Target = T;
153 fn deref(&self) -> &Self::Target {
154 &self.inner().data
155 }
156}
157
impl<T: ?Sized> Clone for Prc<T> {
    // Bump the shared count, then copy the pointer. `inc_ref` is a
    // `Relaxed` increment, which is sufficient for creating a new handle:
    // it can only be made from an existing handle, so the count is
    // provably non-zero and cannot race with destruction (the same
    // reasoning `std::sync::Arc::clone` relies on).
    fn clone(&self) -> Self {
        self.inc_ref();
        Self { ptr: self.ptr }
    }
}
164
165impl<T> Prc<T> {
166 #[inline]
169 pub(crate) fn new_zero(data: T) -> Self {
170 let x: Box<_> = Box::new(PrcInner {
171 count: AtomicUsize::new(0),
172 data,
173 });
174 Self {
175 ptr: Box::leak(x).into(),
176 }
177 }
178
179 #[inline]
181 pub(crate) fn new(data: T) -> Self {
182 let x: Box<_> = Box::new(PrcInner {
183 count: AtomicUsize::new(1),
184 data,
185 });
186 Self {
187 ptr: Box::leak(x).into(),
188 }
189 }
190}
191
impl<T: ?Sized> Prc<T> {
    /// Increments the reference count, returning the *previous* value.
    ///
    /// `Relaxed` suffices for increments: a new reference can only be made
    /// from an existing one, so the count cannot concurrently reach zero
    /// (same reasoning as `std::sync::Arc`).
    #[inline]
    pub(crate) fn inc_ref(&self) -> usize {
        self.inner().count.fetch_add(1, Relaxed)
    }

    /// Decrements the reference count, returning the *previous* value
    /// (so a return of 1 means the caller dropped the last reference).
    ///
    /// `Release` orders this thread's prior uses of the data before the
    /// decrement. NOTE(review): this is `Release`-only, so whichever call
    /// site observes the final decrement must perform an `Acquire`
    /// operation (e.g. `atomic::fence(Acquire)`) before dropping or
    /// recycling the data, or other threads' writes may not yet be
    /// visible — verify the `Drop` impls of the entry handles do this.
    #[inline]
    pub(crate) fn dec_ref(&self) -> usize {
        self.inner().count.fetch_sub(1, Release)
    }

    /// Drops the inner `T` in place without freeing the allocation (the
    /// allocation itself stays leaked for reuse).
    ///
    /// # Safety
    /// Callers must ensure no other reference to the data exists, and that
    /// the data is not used again after this call.
    pub(crate) unsafe fn drop_slow(&self) {
        unsafe { ptr::drop_in_place(&mut (*self.ptr.as_ptr()).data) };
    }

    /// Mutable access to the data without checking uniqueness.
    ///
    /// # Safety
    /// No other `Prc` handle to the same allocation may be used to access
    /// the data while the returned reference is alive.
    #[inline]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        unsafe { &mut (*this.ptr.as_ptr()).data }
    }

    /// Mutable access if the caller is the sole holder.
    ///
    /// Accepts a count of 0 as well as 1: 0 occurs for items created with
    /// `new_zero` that are owned exclusively by the pool.
    #[inline]
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if this.inner().count.load(Acquire) <= 1 {
            // SAFETY: count <= 1 together with our `&mut Self` means no
            // other handle can access the data right now.
            unsafe { Some(Prc::get_mut_unchecked(this)) }
        } else {
            None
        }
    }

    /// Shared view of the heap cell (counter + data).
    #[inline]
    fn inner(&self) -> &PrcInner<T> {
        // SAFETY: `ptr` always points to a live, leaked `PrcInner`
        // allocated by the constructors.
        unsafe { self.ptr.as_ref() }
    }
}
230
/// Heap cell behind every [`Prc`]: the shared atomic reference count
/// followed by the data itself (last so `T: ?Sized` is allowed).
struct PrcInner<T: ?Sized> {
    // Number of live `Prc` handles; 0 is legal for pool-parked items
    // created via `Prc::new_zero`.
    count: AtomicUsize,
    data: T,
}
235
// SAFETY: the cell is shared between threads through `Prc`; `AtomicUsize`
// is inherently thread-safe, so the cell is `Send`/`Sync` exactly when the
// payload `T` is `Send + Sync` (mirroring `std::sync::Arc`'s inner type).
unsafe impl<T: ?Sized + Send + Sync> Send for PrcInner<T> {}
unsafe impl<T: ?Sized + Send + Sync> Sync for PrcInner<T> {}