use super::*;
use portable_atomic::{AtomicU32, Ordering};

/// This unsafe trait is implemented alongside [`VTableMeta`] when the vtable provides
/// `drop_in_place` and `dealloc` functions.
///
/// # Safety
/// The `drop_in_place` and `dealloc` implementations must behave as documented below.
pub unsafe trait VTableMetaDropInPlace: VTableMeta {
    /// # Safety
    /// `ptr` must point to a valid instance whose vtable is `vtable`. After this call the
    /// memory no longer contains a valid object; the layout of the allocation is returned
    /// so it can later be deallocated.
    unsafe fn drop_in_place(vtable: &Self::VTable, ptr: *mut u8) -> vrc::Layout;
    /// # Safety
    /// `ptr` must have been allocated with the given `layout` by the allocator that the
    /// vtable's `dealloc` function expects.
    unsafe fn dealloc(vtable: &Self::VTable, ptr: *mut u8, layout: vrc::Layout);
}

/// Marker type used as the default `X` parameter of [`VRc`]/[`VWeak`] to indicate that the
/// concrete type has been erased and the object is only accessible through its vtable.
pub struct Dyn(PhantomData<*mut ()>);

/// An FFI-friendly `#[repr(C)]` mirror of [`core::alloc::Layout`].
#[repr(C)]
#[derive(Clone, Copy)]
pub struct Layout {
    /// The size in bytes.
    pub size: usize,
    /// The minimum alignment in bytes.
    pub align: usize,
}

impl From<core::alloc::Layout> for Layout {
    fn from(layout: core::alloc::Layout) -> Self {
        Self { size: layout.size(), align: layout.align() }
    }
}

impl core::convert::TryFrom<Layout> for core::alloc::Layout {
    type Error = core::alloc::LayoutError;

    fn try_from(value: Layout) -> Result<Self, Self::Error> {
        Self::from_size_align(value.size, value.align)
    }
}

#[repr(C)]
struct VRcInner<'vt, VTable: VTableMeta, X> {
    vtable: &'vt VTable::VTable,
    /// Number of strong references (`VRc`).
    strong_ref: AtomicU32,
    /// Number of weak references (`VWeak`), plus one shared by all strong references.
    weak_ref: AtomicU32,
    /// Offset in bytes from the start of `VRcInner` to the `data` field.
    data_offset: u16,
    /// The actual data, replaced by the allocation's `Layout` once the last strong
    /// reference has dropped the data while weak references remain.
    data: X,
}

impl<VTable: VTableMeta, X> VRcInner<'_, VTable, X> {
    unsafe fn data_ptr(s: *const Self) -> *const X {
        (s as *const u8).add(*core::ptr::addr_of!((*s).data_offset) as usize) as *const X
    }
    fn as_ref(&self) -> &X {
        let ptr = self as *const Self as *const u8;
        unsafe { &*(ptr.add(self.data_offset as usize) as *const X) }
    }
}

/// A reference-counted pointer to an object matching the virtual table `VTable`.
///
/// Similar to [`alloc::rc::Rc`], except that the pointee is accessed through a vtable
/// and can be type-erased into a `VRc<VTable, Dyn>`.
#[repr(transparent)]
pub struct VRc<VTable: VTableMetaDropInPlace + 'static, X = Dyn> {
    inner: NonNull<VRcInner<'static, VTable, X>>,
}

impl<VTable: VTableMetaDropInPlace + 'static, X> Drop for VRc<VTable, X> {
    fn drop(&mut self) {
        unsafe {
            let inner = self.inner.as_ptr();
            if (*inner).strong_ref.fetch_sub(1, Ordering::SeqCst) == 1 {
                // This was the last strong reference: drop the data in place and compute
                // the layout of the whole allocation.
                let data =
                    (inner as *mut u8).add(*core::ptr::addr_of!((*inner).data_offset) as usize);
                let vtable = core::ptr::addr_of!((*inner).vtable);
                let mut layout = VTable::drop_in_place(*vtable, data);
                layout = core::alloc::Layout::new::<VRcInner<VTable, ()>>()
                    .extend(layout.try_into().unwrap())
                    .unwrap()
                    .0
                    .pad_to_align()
                    .into();
                if (*core::ptr::addr_of!((*inner).weak_ref)).load(Ordering::SeqCst) > 1 {
                    // Weak references remain: stash the layout where the data used to be,
                    // so the last VWeak can deallocate with the correct layout.
                    *(VRcInner::data_ptr(self.inner.cast::<VRcInner<VTable, Layout>>().as_ptr())
                        as *mut Layout) = layout;
                }
                if (*core::ptr::addr_of!((*inner).weak_ref)).fetch_sub(1, Ordering::SeqCst) == 1 {
                    VTable::dealloc(*vtable, self.inner.cast().as_ptr(), layout);
                }
            }
        }
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, X> core::fmt::Debug for VRc<VTable, X> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_struct("VRc").field("inner", &self.inner).finish()
    }
}

impl<VTable: VTableMetaDropInPlace, X: HasStaticVTable<VTable>> VRc<VTable, X> {
    /// Create a new `VRc` holding `data`, using the vtable provided by
    /// [`HasStaticVTable::static_vtable`].
    pub fn new(data: X) -> Self {
        // The allocation must be large enough to later hold a `Layout` in place of the
        // data, which is what the last strong reference stores for remaining weak ones.
        let layout = core::alloc::Layout::new::<VRcInner<VTable, X>>().pad_to_align();
        let layout_with_layout = core::alloc::Layout::new::<VRcInner<VTable, Layout>>();
        let layout = core::alloc::Layout::from_size_align(
            layout.size().max(layout_with_layout.size()),
            layout.align().max(layout_with_layout.align()),
        )
        .unwrap();
        let mem = unsafe { alloc::alloc::alloc(layout) as *mut VRcInner<VTable, X> };
        let inner = NonNull::new(mem).unwrap();
        assert!(!mem.is_null());

        unsafe {
            mem.write(VRcInner {
                vtable: X::static_vtable(),
                strong_ref: AtomicU32::new(1),
                weak_ref: AtomicU32::new(1), // all strong references together hold one weak reference
                data_offset: 0,
                data,
            });
            (*mem).data_offset =
                (&(*mem).data as *const _ as usize - mem as *const _ as usize) as u16;
            VRc { inner }
        }
    }

    /// Convert into a type-erased `VRc<VTable, Dyn>`.
    pub fn into_dyn(this: Self) -> VRc<VTable, Dyn>
    where
        Self: 'static,
    {
        // Safety: `VRc` is `repr(transparent)` over the pointer to `VRcInner`, so both
        // types have the same representation; the data is only accessed via the vtable.
        unsafe { core::mem::transmute(this) }
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, X: HasStaticVTable<VTable> + 'static> VRc<VTable, X> {
    /// Create a [`VRcMapped`] that borrows a component of `this`, keeping the whole
    /// object alive through a type-erased strong reference.
    pub fn map<MappedType: ?Sized>(
        this: Self,
        map_fn: impl for<'r> FnOnce(Pin<&'r X>) -> Pin<&'r MappedType>,
    ) -> VRcMapped<VTable, MappedType> {
        VRcMapped {
            parent_strong: Self::into_dyn(this.clone()),
            object: map_fn(this.as_pin_ref()).get_ref(),
        }
    }
}

impl<VTable: VTableMetaDropInPlace + 'static> VRc<VTable, Dyn> {
    /// Same as [`VRc::map`], but for an already type-erased `VRc<VTable, Dyn>`.
    pub fn map_dyn<MappedType: ?Sized>(
        this: Self,
        map_fn: impl for<'r> FnOnce(Pin<VRef<'r, VTable>>) -> Pin<&'r MappedType>,
    ) -> VRcMapped<VTable, MappedType> {
        VRcMapped { parent_strong: this.clone(), object: map_fn(Self::borrow_pin(&this)).get_ref() }
    }
}

impl<VTable: VTableMetaDropInPlace, X> VRc<VTable, X> {
    /// Get a `Pin<&X>` to the inner data. This is safe because the data held by a `VRc`
    /// is never moved.
    pub fn as_pin_ref(&self) -> Pin<&X> {
        unsafe { Pin::new_unchecked(self) }
    }

    /// Borrow a [`VRef`] to the inner object through its vtable.
    pub fn borrow(this: &Self) -> VRef<'_, VTable> {
        unsafe {
            let inner = this.inner.cast::<VRcInner<VTable, u8>>();
            VRef::from_raw(
                NonNull::from(*::core::ptr::addr_of!((*inner.as_ptr()).vtable)),
                NonNull::new_unchecked(VRcInner::data_ptr(inner.as_ptr()) as *mut u8),
            )
        }
    }

    /// Borrow a pinned [`VRef`] to the inner object.
    pub fn borrow_pin(this: &Self) -> Pin<VRef<'_, VTable>> {
        unsafe { Pin::new_unchecked(Self::borrow(this)) }
    }

    /// Create a new [`VWeak`] reference that does not keep the object alive.
    pub fn downgrade(this: &Self) -> VWeak<VTable, X> {
        let inner = unsafe { this.inner.as_ref() };
        inner.weak_ref.fetch_add(1, Ordering::SeqCst);
        VWeak { inner: Some(this.inner) }
    }

    /// Return the number of strong references to this object.
    pub fn strong_count(this: &Self) -> usize {
        unsafe { this.inner.as_ref().strong_ref.load(Ordering::SeqCst) as usize }
    }

    /// Return `true` if both `VRc`s point to the same allocation.
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.inner == other.inner
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, X> Clone for VRc<VTable, X> {
    fn clone(&self) -> Self {
        let inner = unsafe { self.inner.as_ref() };
        inner.strong_ref.fetch_add(1, Ordering::SeqCst);
        Self { inner: self.inner }
    }
}

impl<VTable: VTableMetaDropInPlace, X> Deref for VRc<VTable, X> {
    type Target = X;
    fn deref(&self) -> &Self::Target {
        let inner = unsafe { self.inner.as_ref() };
        inner.as_ref()
    }
}

// Safety: the reference counts use atomic operations, and the bounds require both the
// vtable and the data to be `Send + Sync`.
unsafe impl<VTable: VTableMetaDropInPlace + Send + Sync + 'static, X: Send + Sync> Send
    for VRc<VTable, X>
{
}
unsafe impl<VTable: VTableMetaDropInPlace + Send + Sync + 'static, X: Send + Sync> Sync
    for VRc<VTable, X>
{
}

/// A weak pointer to an object held by a [`VRc`]. `VTable` is the vtable struct and `X`
/// is the concrete type, or [`Dyn`] if it has been erased.
#[repr(transparent)]
pub struct VWeak<VTable: VTableMetaDropInPlace + 'static, X = Dyn> {
    inner: Option<NonNull<VRcInner<'static, VTable, X>>>,
}

impl<VTable: VTableMetaDropInPlace + 'static, X> Default for VWeak<VTable, X> {
    fn default() -> Self {
        Self { inner: None }
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, X> Clone for VWeak<VTable, X> {
    fn clone(&self) -> Self {
        if let Some(inner) = self.inner {
            let inner = unsafe { inner.as_ref() };
            inner.weak_ref.fetch_add(1, Ordering::SeqCst);
        }
        VWeak { inner: self.inner }
    }
}

impl<T: VTableMetaDropInPlace + 'static, X> Drop for VWeak<T, X> {
    fn drop(&mut self) {
        if let Some(i) = self.inner {
            unsafe {
                if (*core::ptr::addr_of!((*i.as_ptr()).weak_ref)).fetch_sub(1, Ordering::SeqCst)
                    == 1
                {
                    // This was the last weak reference, so the strong count already reached
                    // zero and the data was replaced by the allocation's `Layout`
                    // (see the `Drop` impl of `VRc`). Read it back and deallocate.
                    let vtable = &*core::ptr::addr_of!((*i.as_ptr()).vtable);
                    let layout = *(VRcInner::data_ptr(i.cast::<VRcInner<T, Layout>>().as_ptr()));
                    T::dealloc(vtable, i.cast().as_ptr(), layout);
                }
            }
        }
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, X> VWeak<VTable, X> {
    /// Return a new strong reference, or `None` if the object has already been dropped.
    pub fn upgrade(&self) -> Option<VRc<VTable, X>> {
        if let Some(i) = self.inner {
            let inner = unsafe { i.as_ref() };
            if inner.strong_ref.load(Ordering::SeqCst) == 0 {
                None
            } else {
                inner.strong_ref.fetch_add(1, Ordering::SeqCst);
                Some(VRc { inner: i })
            }
        } else {
            None
        }
    }

    /// Return `true` if both `VWeak`s point to the same allocation (or are both unset).
    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
        this.inner == other.inner
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, X: HasStaticVTable<VTable> + 'static>
    VWeak<VTable, X>
{
    /// Convert into a type-erased `VWeak<VTable, Dyn>`.
    pub fn into_dyn(self) -> VWeak<VTable, Dyn> {
        // Safety: same representation, see `VRc::into_dyn`.
        unsafe { core::mem::transmute(self) }
    }
}

// Safety: the data pointed to by a `VRc` stays at the same address for as long as the
// `VRc` (or any of its clones) is alive.
unsafe impl<VTable: VTableMetaDropInPlace + 'static, X> stable_deref_trait::StableDeref
    for VRc<VTable, X>
{
}

unsafe impl<VTable: VTableMetaDropInPlace + 'static, X> stable_deref_trait::CloneStableDeref
    for VRc<VTable, X>
{
}

/// A reference to a component within a [`VRc`], created with [`VRc::map`]. It holds a
/// strong reference to the original object, so the mapped component stays valid.
pub struct VRcMapped<VTable: VTableMetaDropInPlace + 'static, MappedType: ?Sized> {
    parent_strong: VRc<VTable, Dyn>,
    object: *const MappedType,
}

impl<VTable: VTableMetaDropInPlace + 'static, MappedType: ?Sized> Clone
    for VRcMapped<VTable, MappedType>
{
    fn clone(&self) -> Self {
        Self { parent_strong: self.parent_strong.clone(), object: self.object }
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, MappedType: ?Sized> VRcMapped<VTable, MappedType> {
    /// Create a [`VWeakMapped`] that does not keep the original object alive.
    pub fn downgrade(this: &Self) -> VWeakMapped<VTable, MappedType> {
        VWeakMapped { parent_weak: VRc::downgrade(&this.parent_strong), object: this.object }
    }

    /// Get a `Pin<&MappedType>`. This is safe because the mapped data is pinned by the
    /// underlying `VRc`.
    pub fn as_pin_ref(&self) -> Pin<&MappedType> {
        unsafe { Pin::new_unchecked(self) }
    }

    /// Map the mapped reference further, keeping the same strong reference to the
    /// original object.
    pub fn map<ReMappedType: ?Sized>(
        this: Self,
        map_fn: impl for<'r> FnOnce(Pin<&'r MappedType>) -> Pin<&'r ReMappedType>,
    ) -> VRcMapped<VTable, ReMappedType> {
        VRcMapped {
            parent_strong: this.parent_strong.clone(),
            object: map_fn(this.as_pin_ref()).get_ref(),
        }
    }

    /// Return a strong reference to the object this was mapped from.
    pub fn origin(this: &Self) -> VRc<VTable> {
        this.parent_strong.clone()
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, MappedType: ?Sized> Deref
    for VRcMapped<VTable, MappedType>
{
    type Target = MappedType;
    fn deref(&self) -> &Self::Target {
        // Safety: `object` stays valid because `parent_strong` keeps the allocation alive
        // and the data is never moved.
        unsafe { &*self.object }
    }
}

/// A weak reference to a component within a [`VRc`], created with
/// [`VRcMapped::downgrade`].
pub struct VWeakMapped<VTable: VTableMetaDropInPlace + 'static, MappedType: ?Sized> {
    parent_weak: VWeak<VTable, Dyn>,
    object: *const MappedType,
}

impl<VTable: VTableMetaDropInPlace + 'static, MappedType: ?Sized> VWeakMapped<VTable, MappedType> {
    /// Return a strong [`VRcMapped`], or `None` if the original object was already dropped.
    pub fn upgrade(&self) -> Option<VRcMapped<VTable, MappedType>> {
        self.parent_weak
            .upgrade()
            .map(|parent| VRcMapped { parent_strong: parent, object: self.object })
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, MappedType: ?Sized> Clone
    for VWeakMapped<VTable, MappedType>
{
    fn clone(&self) -> Self {
        Self { parent_weak: self.parent_weak.clone(), object: self.object }
    }
}

impl<VTable: VTableMetaDropInPlace + 'static, MappedType> Default
    for VWeakMapped<VTable, MappedType>
{
    fn default() -> Self {
        Self { parent_weak: VWeak::default(), object: core::ptr::null() }
    }
}
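
// A minimal sketch (not part of the original file) showing how the `#[repr(C)]` `Layout`
// defined above mirrors `core::alloc::Layout`. The module and test names are hypothetical,
// and it assumes the crate is built with a test harness.
#[cfg(test)]
mod layout_conversion_sketch {
    use super::Layout;
    use core::convert::TryFrom;

    #[test]
    fn layout_round_trips_through_core_alloc_layout() {
        let std_layout = core::alloc::Layout::new::<u64>();
        // `From` copies the size and alignment into the FFI-friendly mirror.
        let ffi_layout: Layout = std_layout.into();
        assert_eq!(ffi_layout.size, std_layout.size());
        assert_eq!(ffi_layout.align, std_layout.align());
        // `TryFrom` re-validates the stored size/align and rebuilds the core layout.
        let back = core::alloc::Layout::try_from(ffi_layout).unwrap();
        assert_eq!(back, std_layout);
    }
}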