use core::{
    fmt::Debug,
    hash::Hash,
    marker::PhantomData,
    mem::{ManuallyDrop, MaybeUninit},
    ptr::NonNull,
    sync::atomic::{AtomicPtr, AtomicUsize, Ordering},
};

use crate::{
    unreachable_unchecked, vtable::HasDropVt, AnonymRef, AnonymRefMut, Dyn, IStable, IntoDyn,
};

use super::{
    vec::{ptr_add, ptr_diff, Vec, VecInner},
    AllocPtr, AllocSlice, DefaultAllocator, IAlloc,
};

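/// An atomically reference-counted smart pointer, analogous to `std::sync::Arc`,
/// but ABI-stable and generic over its allocator.
///
/// The strong and weak counts, as well as the allocator, live in a prefix of the
/// allocation, reachable through `AllocPtr::prefix`.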
#[crate::stabby]
pub struct Arc<T, Alloc: IAlloc = super::DefaultAllocator> {
    ptr: AllocPtr<T, Alloc>,
}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Send for Arc<T, Alloc> {}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Sync for Arc<T, Alloc> {}
/// The top bit of a `usize`, used by `upgrade` to lock the strong count.
const USIZE_TOP_BIT: usize = 1 << (usize::BITS - 1);

#[cfg(not(stabby_default_alloc = "disabled"))]
impl<T> Arc<T> {
    /// Attempts to construct a `T` in place, using the default allocator.
    ///
    /// # Safety
    /// `constructor` must fully initialize its argument if it returns `Ok`.
    pub unsafe fn make<
        F: for<'a> FnOnce(&'a mut core::mem::MaybeUninit<T>) -> Result<&'a mut T, ()>,
    >(
        constructor: F,
    ) -> Result<Self, Arc<MaybeUninit<T>>> {
        unsafe { Self::make_in(constructor, super::DefaultAllocator::new()) }
    }
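    /// Allocates and stores `value`, using the default allocator.
    ///
    /// A minimal usage sketch (`ignore`d because the exact import path for this
    /// `Arc` depends on how the enclosing crate re-exports it):
    /// ```ignore
    /// let five = Arc::new(5);
    /// assert_eq!(*five, 5);
    /// let also_five = five.clone(); // bumps the strong count, no new allocation
    /// assert_eq!(Arc::strong_count(&five), 2);
    /// ```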
    pub fn new(value: T) -> Self {
        Self::new_in(value, DefaultAllocator::new())
    }
}

impl<T, Alloc: IAlloc> Arc<T, Alloc> {
    /// Attempts to construct a `T` in place within an allocation from `alloc`.
    ///
    /// Returns `Err(Err((constructor, alloc)))` if the allocation itself failed, and
    /// `Err(Ok(arc))` (the still-uninitialized allocation) if `constructor` failed.
    ///
    /// # Safety
    /// `constructor` must fully initialize its argument if it returns `Ok`.
    #[allow(clippy::type_complexity)]
    pub unsafe fn try_make_in<
        F: for<'a> FnOnce(&'a mut core::mem::MaybeUninit<T>) -> Result<&'a mut T, ()>,
    >(
        constructor: F,
        mut alloc: Alloc,
    ) -> Result<Self, Result<Arc<MaybeUninit<T>, Alloc>, (F, Alloc)>> {
        let mut ptr = match AllocPtr::alloc(&mut alloc) {
            Some(mut ptr) => {
                // Move the allocator into the allocation's prefix and start
                // both reference counts at 1.
                let prefix = unsafe { ptr.prefix_mut() };
                prefix.alloc.write(alloc);
                prefix.strong = AtomicUsize::new(1);
                prefix.weak = AtomicUsize::new(1);
                ptr
            }
            None => return Err(Err((constructor, alloc))),
        };
        constructor(unsafe { ptr.as_mut() }).map_or_else(
            |()| Err(Ok(Arc { ptr })),
            |_| {
                Ok(Self {
                    ptr: unsafe { ptr.assume_init() },
                })
            },
        )
    }
    /// Attempts to allocate and store `value`, returning it along with `alloc` on failure.
    pub fn try_new_in(value: T, alloc: Alloc) -> Result<Self, (T, Alloc)> {
        let this = unsafe {
            Self::try_make_in(
                |slot: &mut core::mem::MaybeUninit<T>| {
                    // Move `value` into the slot; the original is forgotten on success.
                    Ok(slot.write(core::ptr::read(&value)))
                },
                alloc,
            )
        };
        match this {
            Ok(this) => {
                core::mem::forget(value);
                Ok(this)
            }
            // Allocation failed, so the constructor never ran and `value` is intact.
            Err(Err((_, a))) => Err((value, a)),
            // The constructor above never fails.
            Err(Ok(_)) => unsafe { unreachable_unchecked!() },
        }
    }
    /// Constructs a `T` in place within an allocation from `alloc`.
    ///
    /// # Panics
    /// If the allocation fails.
    ///
    /// # Safety
    /// `constructor` must fully initialize its argument if it returns `Ok`.
    pub unsafe fn make_in<
        F: for<'a> FnOnce(&'a mut core::mem::MaybeUninit<T>) -> Result<&'a mut T, ()>,
    >(
        constructor: F,
        alloc: Alloc,
    ) -> Result<Self, Arc<MaybeUninit<T>, Alloc>> {
        Self::try_make_in(constructor, alloc).map_err(|e| match e {
            Ok(uninit) => uninit,
            Err(_) => panic!("Allocation failed"),
        })
    }
    /// Allocates and stores `value`.
    ///
    /// # Panics
    /// If the allocation fails.
    pub fn new_in(value: T, alloc: Alloc) -> Self {
        let this = unsafe { Self::make_in(move |slot| Ok(slot.write(value)), alloc) };
        // The constructor above is infallible, so `make_in` could only fail by panicking.
        unsafe { this.unwrap_unchecked() }
    }

    /// Consumes the `Arc` without touching its counts, returning its raw allocation pointer.
    pub const fn into_raw(this: Self) -> AllocPtr<T, Alloc> {
        let inner = this.ptr;
        core::mem::forget(this);
        inner
    }
    /// Reconstructs an `Arc` from a pointer obtained via [`Arc::into_raw`].
    ///
    /// # Safety
    /// `this` must stem from `Arc::into_raw`, and the strong count it carried
    /// must not have been released in the meantime.
    pub const unsafe fn from_raw(this: AllocPtr<T, Alloc>) -> Self {
        Self { ptr: this }
    }

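    /// Returns a mutable reference to the pointee if no other reference to it
    /// exists, i.e. if both counts are exactly 1 (see [`Arc::is_unique`]).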
    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
        if Self::is_unique(this) {
            Some(unsafe { Self::get_mut_unchecked(this) })
        } else {
            None
        }
    }

    /// Returns a mutable reference to the pointee without checking uniqueness.
    ///
    /// # Safety
    /// No other live reference to the pointee may be used for the lifetime of
    /// the returned reference.
    #[rustversion::attr(since(1.86), const)]
    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
        unsafe { this.ptr.ptr.as_mut() }
    }

    /// Returns the current strong count.
    pub fn strong_count(this: &Self) -> usize {
        unsafe { this.ptr.prefix() }.strong.load(Ordering::Relaxed)
    }
    /// Increments the strong count, returning its previous value.
    ///
    /// # Safety
    /// `this` must point to the element of a live `Arc` allocation.
    pub unsafe fn increment_strong_count(this: *const T) -> usize {
        let ptr: AllocPtr<T, Alloc> = AllocPtr {
            ptr: NonNull::new_unchecked(this.cast_mut()),
            marker: PhantomData,
        };
        unsafe { ptr.prefix() }
            .strong
            .fetch_add(1, Ordering::Relaxed)
    }
    /// Returns the current weak count.
    pub fn weak_count(this: &Self) -> usize {
        unsafe { this.ptr.prefix() }.weak.load(Ordering::Relaxed)
    }
    /// Increments the weak count, returning its previous value.
    pub fn increment_weak_count(this: &Self) -> usize {
        unsafe { this.ptr.prefix() }
            .weak
            .fetch_add(1, Ordering::Relaxed)
    }

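    /// Clone-on-write access: returns a mutable reference to this `Arc`'s pointee,
    /// cloning it into a fresh allocation first if other references exist.
    ///
    /// An illustrative sketch of the semantics (`ignore`d, as the import path
    /// depends on the enclosing crate):
    /// ```ignore
    /// let mut a = Arc::new(5);
    /// let b = a.clone();
    /// *a.make_mut() += 1; // `a` is shared, so its contents are cloned first
    /// assert_eq!((*a, *b), (6, 5));
    /// ```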
    pub fn make_mut(&mut self) -> &mut T
    where
        T: Clone,
        Alloc: Clone,
    {
        if !Self::is_unique(self) {
            // Shared: clone the pointee into a new allocation with a cloned allocator.
            *self = Self::new_in(
                T::clone(self),
                unsafe { self.ptr.prefix().alloc.assume_init_ref() }.clone(),
            );
        }
        unsafe { Self::get_mut_unchecked(self) }
    }

    /// Like [`Arc::make_mut`], but also returns a reference to the allocator.
    pub fn make_mut_and_get_alloc(&mut self) -> (&mut T, &Alloc)
    where
        T: Clone,
        Alloc: Clone,
    {
        if !Self::is_unique(self) {
            *self = Self::new_in(
                T::clone(self),
                unsafe { self.ptr.prefix().alloc.assume_init_ref() }.clone(),
            );
        }
        let (prefix, inner) = unsafe { self.ptr.split_mut() };
        (inner, unsafe { prefix.alloc.assume_init_ref() })
    }

    /// Returns `true` if no other `Arc` or [`Weak`] refers to this allocation
    /// (both counts start at 1, so unique means `strong == 1 && weak == 1`).
    pub fn is_unique(this: &Self) -> bool {
        Self::strong_count(this) == 1 && Self::weak_count(this) == 1
    }
    /// Returns the inner value if this `Arc` is unique, or `this` itself otherwise.
    pub fn try_into_inner(this: Self) -> Result<T, Self> {
        if !Self::is_unique(&this) {
            Err(this)
        } else {
            let ret = unsafe { core::ptr::read(&*this) };
            // Reinterpret the Arc as a Weak so that dropping it releases the
            // allocation without double-dropping the value we just moved out.
            _ = unsafe { Weak::<T, Alloc>::from_raw(Arc::into_raw(this)) };
            Ok(ret)
        }
    }

    /// Constructs a [`Weak`] reference to this allocation.
    pub fn downgrade(this: &Self) -> Weak<T, Alloc> {
        this.into()
    }
    /// Returns a reference to the allocator that owns this allocation.
    #[rustversion::since(1.73)]
    pub const fn allocator(this: &Self) -> &Alloc {
        unsafe { this.ptr.prefix().alloc.assume_init_ref() }
    }
    /// Returns a reference to the allocator that owns this allocation.
    #[rustversion::before(1.73)]
    pub fn allocator(this: &Self) -> &Alloc {
        unsafe { this.ptr.prefix().alloc.assume_init_ref() }
    }
}
impl<T, Alloc: IAlloc> Drop for Arc<T, Alloc> {
    fn drop(&mut self) {
        // Release one strong reference; only the last one cleans up.
        if unsafe { self.ptr.prefix() }
            .strong
            .fetch_sub(1, Ordering::Relaxed)
            != 1
        {
            return;
        }
        unsafe {
            core::ptr::drop_in_place(self.ptr.as_mut());
            // Dropping the implicit Weak frees the allocation if no other Weak survives.
            _ = Weak::<T, Alloc>::from_raw(self.ptr);
        }
    }
}
impl<T, Alloc: IAlloc> Clone for Arc<T, Alloc> {
    fn clone(&self) -> Self {
        unsafe { self.ptr.prefix() }
            .strong
            .fetch_add(1, Ordering::Relaxed);
        Self { ptr: self.ptr }
    }
}
impl<T, Alloc: IAlloc> core::ops::Deref for Arc<T, Alloc> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        unsafe { self.ptr.as_ref() }
    }
}

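/// A non-owning reference to an [`Arc`] allocation, analogous to `std::sync::Weak`.
///
/// A `Weak` keeps the allocation alive (via the weak count) but not the value:
/// once all strong references are gone, `upgrade` returns `None`.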
#[crate::stabby]
pub struct Weak<T, Alloc: IAlloc = super::DefaultAllocator> {
    ptr: AllocPtr<T, Alloc>,
}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Send for Weak<T, Alloc> {}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Sync for Weak<T, Alloc> {}
impl<T, Alloc: IAlloc> From<&Arc<T, Alloc>> for Arc<T, Alloc> {
    fn from(value: &Arc<T, Alloc>) -> Self {
        value.clone()
    }
}
impl<T, Alloc: IAlloc> From<&Weak<T, Alloc>> for Weak<T, Alloc> {
    fn from(value: &Weak<T, Alloc>) -> Self {
        value.clone()
    }
}
impl<T, Alloc: IAlloc> From<&Arc<T, Alloc>> for Weak<T, Alloc> {
    fn from(value: &Arc<T, Alloc>) -> Self {
        unsafe { value.ptr.prefix() }
            .weak
            .fetch_add(1, Ordering::Relaxed);
        Self { ptr: value.ptr }
    }
}
impl<T, Alloc: IAlloc> Weak<T, Alloc> {
    /// Consumes the `Weak` without touching its counts, returning its raw allocation pointer.
    pub const fn into_raw(this: Self) -> AllocPtr<T, Alloc> {
        let inner = this.ptr;
        core::mem::forget(this);
        inner
    }
    /// Reconstructs a `Weak` from a pointer obtained via [`Weak::into_raw`].
    ///
    /// # Safety
    /// `this` must stem from `Weak::into_raw`, and the weak count it carried
    /// must not have been released in the meantime.
    pub const unsafe fn from_raw(this: AllocPtr<T, Alloc>) -> Self {
        Self { ptr: this }
    }
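    /// Attempts to obtain a new [`Arc`] to the pointee, returning `None` if all
    /// strong references have already been dropped.
    ///
    /// The implementation briefly "locks" the strong count by setting its top
    /// bit (`USIZE_TOP_BIT`), so the count cannot race to zero between the
    /// check and the increment.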
    pub fn upgrade(&self) -> Option<Arc<T, Alloc>> {
        let strong = &unsafe { self.ptr.prefix() }.strong;
        let count = strong.fetch_or(USIZE_TOP_BIT, Ordering::Acquire);
        match count {
            // The count was 0 (possibly with another upgrader's lock bit set):
            // the value is gone, restore the count and fail.
            0 | USIZE_TOP_BIT => {
                strong.store(0, Ordering::Release);
                None
            }
            _ => {
                // Take a strong reference, then clear the lock bit.
                strong.fetch_add(1, Ordering::Release);
                strong.fetch_and(!USIZE_TOP_BIT, Ordering::Release);
                Some(Arc { ptr: self.ptr })
            }
        }
    }
}
impl<T, Alloc: IAlloc> Clone for Weak<T, Alloc> {
    fn clone(&self) -> Self {
        unsafe { self.ptr.prefix() }
            .weak
            .fetch_add(1, Ordering::Relaxed);
        Self { ptr: self.ptr }
    }
}
impl<T, Alloc: IAlloc> Drop for Weak<T, Alloc> {
    fn drop(&mut self) {
        // Release one weak reference; the last one frees the allocation.
        if unsafe { self.ptr.prefix() }
            .weak
            .fetch_sub(1, Ordering::Relaxed)
            != 1
        {
            return;
        }
        unsafe {
            // Recover the allocator from the prefix to free the allocation it owns.
            let mut alloc = self.ptr.prefix().alloc.assume_init_read();
            self.ptr.free(&mut alloc)
        }
    }
}

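/// An atomically reference-counted slice: all elements share one allocation and
/// one pair of counts.
///
/// A minimal usage sketch (`ignore`d, as the import path depends on the
/// enclosing crate):
/// ```ignore
/// let slice: ArcSlice<u8> = ArcSlice::from(&[1u8, 2, 3][..]);
/// assert_eq!(slice.as_slice(), &[1, 2, 3]);
/// let copy = slice.clone(); // bumps the strong count, no new allocation
/// assert_eq!(ArcSlice::strong_count(&copy), 2);
/// ```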
#[crate::stabby]
pub struct ArcSlice<T, Alloc: IAlloc = super::DefaultAllocator> {
    pub(crate) inner: AllocSlice<T, Alloc>,
}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Send for ArcSlice<T, Alloc> {}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Sync for ArcSlice<T, Alloc> {}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Send for WeakSlice<T, Alloc> {}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Sync for WeakSlice<T, Alloc> {}

impl<T, Alloc: IAlloc> ArcSlice<T, Alloc> {
    /// Returns the number of elements in the slice.
    pub const fn len(&self) -> usize {
        ptr_diff(self.inner.end, self.inner.start.ptr)
    }
    /// Returns `true` if the slice is empty.
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Returns a view of the slice.
    #[rustversion::attr(since(1.86), const)]
    pub fn as_slice(&self) -> &[T] {
        let start = self.inner.start;
        unsafe { core::slice::from_raw_parts(start.ptr.as_ptr(), self.len()) }
    }
    /// Returns a mutable view of the slice if no other reference to it exists.
    pub fn as_slice_mut(&mut self) -> Option<&mut [T]> {
        (ArcSlice::strong_count(self) == 1 && ArcSlice::weak_count(self) == 1)
            .then(|| unsafe { self.as_slice_mut_unchecked() })
    }
    /// Returns a mutable view of the slice without checking uniqueness.
    ///
    /// # Safety
    /// No other live reference to the slice may be used for the lifetime of
    /// the returned reference.
    #[rustversion::attr(since(1.86), const)]
    pub unsafe fn as_slice_mut_unchecked(&mut self) -> &mut [T] {
        let start = self.inner.start;
        unsafe { core::slice::from_raw_parts_mut(start.ptr.as_ptr(), self.len()) }
    }
    /// Returns the current strong count.
    pub fn strong_count(this: &Self) -> usize {
        unsafe { this.inner.start.prefix().strong.load(Ordering::Relaxed) }
    }
    /// Returns the current weak count.
    pub fn weak_count(this: &Self) -> usize {
        unsafe { this.inner.start.prefix().weak.load(Ordering::Relaxed) }
    }
    /// Returns `true` if no other `ArcSlice` or [`WeakSlice`] refers to this allocation.
    pub fn is_unique(this: &Self) -> bool {
        Self::strong_count(this) == 1 && Self::weak_count(this) == 1
    }
    /// Consumes the `ArcSlice` without touching its counts, returning its raw parts.
    pub const fn into_raw(this: Self) -> AllocSlice<T, Alloc> {
        let inner = this.inner;
        core::mem::forget(this);
        inner
    }
    /// Reconstructs an `ArcSlice` from parts obtained via [`ArcSlice::into_raw`].
    ///
    /// # Safety
    /// `this` must stem from `ArcSlice::into_raw`, and the strong count it
    /// carried must not have been released in the meantime.
    pub const unsafe fn from_raw(this: AllocSlice<T, Alloc>) -> Self {
        Self { inner: this }
    }
}
impl<T, Alloc: IAlloc> core::ops::Deref for ArcSlice<T, Alloc> {
    type Target = [T];
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T, Alloc: IAlloc> Clone for ArcSlice<T, Alloc> {
    fn clone(&self) -> Self {
        unsafe { self.inner.start.prefix() }
            .strong
            .fetch_add(1, Ordering::Relaxed);
        Self { inner: self.inner }
    }
}
impl<T, Alloc: IAlloc> From<Arc<T, Alloc>> for ArcSlice<T, Alloc> {
    fn from(mut value: Arc<T, Alloc>) -> Self {
        // A single element behaves like a slice of length (and capacity) 1.
        unsafe { value.ptr.prefix_mut() }.capacity = AtomicUsize::new(1);
        // Take ownership of the allocation without running `Arc::drop`, so the
        // strong count is transferred to the returned `ArcSlice`.
        let ptr = Arc::into_raw(value);
        Self {
            inner: AllocSlice {
                start: ptr,
                end: ptr_add(ptr.ptr, 1),
            },
        }
    }
}
impl<T: Copy, Alloc: IAlloc + Default> From<&[T]> for ArcSlice<T, Alloc> {
    fn from(value: &[T]) -> Self {
        Vec::from(value).into()
    }
}
impl<T, Alloc: IAlloc> From<Vec<T, Alloc>> for ArcSlice<T, Alloc> {
    fn from(value: Vec<T, Alloc>) -> Self {
        let (mut slice, capacity, mut alloc) = value.into_raw_components();
        if capacity != 0 {
            // The vector owns an allocation: adopt it, initializing the counts.
            unsafe {
                slice.start.prefix_mut().strong = AtomicUsize::new(1);
                slice.start.prefix_mut().weak = AtomicUsize::new(1);
                slice.start.prefix_mut().capacity = AtomicUsize::new(capacity);
                slice.start.prefix_mut().alloc.write(alloc);
            }
            Self {
                inner: AllocSlice {
                    start: slice.start,
                    end: slice.end,
                },
            }
        } else {
            // The vector never allocated (it is empty, or `T` is a ZST):
            // create a zero-sized allocation to hold the prefix.
            let mut start = AllocPtr::alloc_array(&mut alloc, 0).expect("Allocation failed");
            unsafe {
                start.prefix_mut().strong = AtomicUsize::new(1);
                start.prefix_mut().weak = AtomicUsize::new(1);
                start.prefix_mut().capacity = if core::mem::size_of::<T>() != 0 {
                    AtomicUsize::new(0)
                } else {
                    // For ZSTs, report the maximum capacity reachable from `start`.
                    AtomicUsize::new(ptr_diff(
                        core::mem::transmute::<usize, NonNull<u8>>(usize::MAX),
                        start.ptr.cast::<u8>(),
                    ))
                };
                // The allocator belongs in the prefix of the new allocation,
                // not in the vector's (dangling) start pointer.
                start.prefix_mut().alloc.write(alloc);
            }
            Self {
                inner: AllocSlice {
                    start,
                    end: ptr_add(start.ptr.cast::<u8>(), slice.len()).cast(),
                },
            }
        }
    }
}
impl<T, Alloc: IAlloc> TryFrom<ArcSlice<T, Alloc>> for Vec<T, Alloc> {
    type Error = ArcSlice<T, Alloc>;
    /// Converts back into a [`Vec`] without copying if the slice is unique
    /// (and `T` is not a ZST); otherwise returns the slice unchanged.
    fn try_from(value: ArcSlice<T, Alloc>) -> Result<Self, Self::Error> {
        if core::mem::size_of::<T>() == 0 || !ArcSlice::is_unique(&value) {
            Err(value)
        } else {
            unsafe {
                let ret = Vec {
                    inner: VecInner {
                        start: value.inner.start,
                        end: value.inner.end,
                        capacity: ptr_add(
                            value.inner.start.ptr,
                            value.inner.start.prefix().capacity.load(Ordering::Relaxed),
                        ),
                        alloc: value.inner.start.prefix().alloc.assume_init_read(),
                    },
                };
                core::mem::forget(value);
                Ok(ret)
            }
        }
    }
}
impl<T: Eq, Alloc: IAlloc> Eq for ArcSlice<T, Alloc> {}
impl<T: PartialEq, Alloc: IAlloc> PartialEq for ArcSlice<T, Alloc> {
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}
impl<T: Ord, Alloc: IAlloc> Ord for ArcSlice<T, Alloc> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}
impl<T: PartialOrd, Alloc: IAlloc> PartialOrd for ArcSlice<T, Alloc> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}
impl<T: Hash, Alloc: IAlloc> Hash for ArcSlice<T, Alloc> {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state)
    }
}
impl<T, Alloc: IAlloc> Drop for ArcSlice<T, Alloc> {
    fn drop(&mut self) {
        // Release one strong reference; only the last one cleans up.
        if unsafe { self.inner.start.prefix() }
            .strong
            .fetch_sub(1, Ordering::Relaxed)
            != 1
        {
            return;
        }
        unsafe { core::ptr::drop_in_place(self.as_slice_mut_unchecked()) }
        // Dropping the implicit WeakSlice frees the allocation if no other Weak survives.
        _ = WeakSlice { inner: self.inner };
    }
}
impl<T: Debug, Alloc: IAlloc> Debug for ArcSlice<T, Alloc> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.as_slice().fmt(f)
    }
}
impl<T: core::fmt::LowerHex, Alloc: IAlloc> core::fmt::LowerHex for ArcSlice<T, Alloc> {
    /// Formats the elements as colon-separated lowercase hex.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut first = true;
        for item in self {
            if !first {
                f.write_str(":")?;
            }
            first = false;
            core::fmt::LowerHex::fmt(item, f)?;
        }
        Ok(())
    }
}
impl<T: core::fmt::UpperHex, Alloc: IAlloc> core::fmt::UpperHex for ArcSlice<T, Alloc> {
    /// Formats the elements as colon-separated uppercase hex.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut first = true;
        for item in self {
            if !first {
                f.write_str(":")?;
            }
            first = false;
            core::fmt::UpperHex::fmt(item, f)?;
        }
        Ok(())
    }
}
impl<'a, T, Alloc: IAlloc> IntoIterator for &'a ArcSlice<T, Alloc> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> Self::IntoIter {
        self.as_slice().iter()
    }
}

impl<T, Alloc: IAlloc + Default> FromIterator<T> for ArcSlice<T, Alloc> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        Vec::from_iter(iter).into()
    }
}

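/// A non-owning reference to an [`ArcSlice`] allocation.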
#[crate::stabby]
pub struct WeakSlice<T, Alloc: IAlloc = super::DefaultAllocator> {
    pub(crate) inner: AllocSlice<T, Alloc>,
}

impl<T, Alloc: IAlloc> WeakSlice<T, Alloc> {
    /// Attempts to obtain a new [`ArcSlice`], returning `None` if all strong
    /// references have already been dropped.
    pub fn upgrade(&self) -> Option<ArcSlice<T, Alloc>> {
        let strong = &unsafe { self.inner.start.prefix() }.strong;
        // Lock the strong count by setting its top bit while we inspect it.
        let count = strong.fetch_or(USIZE_TOP_BIT, Ordering::Acquire);
        match count {
            // The count was 0: the values are gone, restore the count and fail.
            0 | USIZE_TOP_BIT => {
                strong.store(0, Ordering::Release);
                None
            }
            _ => {
                // Take a strong reference, then clear the lock bit.
                strong.fetch_add(1, Ordering::Release);
                strong.fetch_and(!USIZE_TOP_BIT, Ordering::Release);
                Some(ArcSlice { inner: self.inner })
            }
        }
    }
    /// Upgrades even if all strong references are gone, reviving the slice.
    ///
    /// The `T: Copy` bound guarantees the elements have no drop glue, so their
    /// bytes remain valid after the last [`ArcSlice`] dropped them in place.
    pub fn force_upgrade(&self) -> ArcSlice<T, Alloc>
    where
        T: Copy,
    {
        let strong = &unsafe { self.inner.start.prefix() }.strong;
        match strong.fetch_add(1, Ordering::Release) {
            0 | USIZE_TOP_BIT => {
                // The slice had been fully released: restore the implicit weak
                // reference held by the strong side.
                unsafe { self.inner.start.prefix() }
                    .weak
                    .fetch_add(1, Ordering::Relaxed);
            }
            _ => {}
        }
        ArcSlice { inner: self.inner }
    }
}
impl<T, Alloc: IAlloc> Clone for WeakSlice<T, Alloc> {
    fn clone(&self) -> Self {
        unsafe { self.inner.start.prefix() }
            .weak
            .fetch_add(1, Ordering::Relaxed);
        Self { inner: self.inner }
    }
}
impl<T, Alloc: IAlloc> From<&ArcSlice<T, Alloc>> for ArcSlice<T, Alloc> {
    fn from(value: &ArcSlice<T, Alloc>) -> Self {
        value.clone()
    }
}
impl<T, Alloc: IAlloc> From<&WeakSlice<T, Alloc>> for WeakSlice<T, Alloc> {
    fn from(value: &WeakSlice<T, Alloc>) -> Self {
        value.clone()
    }
}
impl<T, Alloc: IAlloc> From<&ArcSlice<T, Alloc>> for WeakSlice<T, Alloc> {
    fn from(value: &ArcSlice<T, Alloc>) -> Self {
        unsafe { value.inner.start.prefix() }
            .weak
            .fetch_add(1, Ordering::Relaxed);
        Self { inner: value.inner }
    }
}
impl<T, Alloc: IAlloc> Drop for WeakSlice<T, Alloc> {
    fn drop(&mut self) {
        // Release one weak reference; the last one frees the allocation.
        if unsafe { self.inner.start.prefix() }
            .weak
            .fetch_sub(1, Ordering::Relaxed)
            != 1
        {
            return;
        }
        let mut alloc = unsafe { self.inner.start.prefix().alloc.assume_init_read() };
        unsafe { self.inner.start.free(&mut alloc) }
    }
}
pub use super::string::{ArcStr, WeakStr};

impl<T, Alloc: IAlloc> crate::IPtr for Arc<T, Alloc> {
    unsafe fn as_ref(&self) -> AnonymRef<'_> {
        AnonymRef {
            ptr: self.ptr.ptr.cast(),
            _marker: PhantomData,
        }
    }
}
impl<T, Alloc: IAlloc> crate::IPtrClone for Arc<T, Alloc> {
    fn clone(this: &Self) -> Self {
        this.clone()
    }
}

impl<T, Alloc: IAlloc> crate::IPtrTryAsMut for Arc<T, Alloc> {
    unsafe fn try_as_mut(&mut self) -> Option<AnonymRefMut<'_>> {
        Self::is_unique(self).then(|| AnonymRefMut {
            ptr: self.ptr.ptr.cast(),
            _marker: PhantomData,
        })
    }
}
impl<T, Alloc: IAlloc> crate::IPtrOwned for Arc<T, Alloc> {
    fn drop(
        this: &mut core::mem::ManuallyDrop<Self>,
        drop: unsafe extern "C" fn(AnonymRefMut<'_>),
    ) {
        // Mirror `Arc::drop`, but let the vtable-provided `drop` destroy the pointee.
        if unsafe { this.ptr.prefix() }
            .strong
            .fetch_sub(1, Ordering::Relaxed)
            != 1
        {
            return;
        }
        unsafe {
            drop(AnonymRefMut {
                ptr: this.ptr.ptr.cast(),
                _marker: PhantomData,
            });
            _ = Weak::<T, Alloc>::from_raw(this.ptr);
        }
    }
}

impl<T, Alloc: IAlloc> IntoDyn for Arc<T, Alloc> {
    type Anonymized = Arc<(), Alloc>;
    type Target = T;
    fn anonimize(self) -> Self::Anonymized {
        let original_prefix = self.ptr.prefix_ptr();
        let anonymized = unsafe { core::mem::transmute::<Self, Self::Anonymized>(self) };
        let anonymized_prefix = anonymized.ptr.prefix_ptr();
        assert_eq!(
            anonymized_prefix, original_prefix,
            "The allocation prefix was lost during anonymization; this is definitely a bug, please report it."
        );
        anonymized
    }
}

impl<T, Alloc: IAlloc> crate::IPtrOwned for Weak<T, Alloc> {
    fn drop(
        this: &mut core::mem::ManuallyDrop<Self>,
        _drop: unsafe extern "C" fn(AnonymRefMut<'_>),
    ) {
        // Release one weak reference; the last one frees the allocation directly
        // (as in `Weak::drop`, the count must only be decremented once).
        if unsafe { this.ptr.prefix() }
            .weak
            .fetch_sub(1, Ordering::Relaxed)
            != 1
        {
            return;
        }
        unsafe {
            let mut alloc = this.ptr.prefix().alloc.assume_init_read();
            this.ptr.free(&mut alloc);
        }
    }
}

impl<T, Alloc: IAlloc> crate::IPtrClone for Weak<T, Alloc> {
    fn clone(this: &Self) -> Self {
        this.clone()
    }
}

impl<T, Alloc: IAlloc> IntoDyn for Weak<T, Alloc> {
    type Anonymized = Weak<(), Alloc>;
    type Target = T;
    fn anonimize(self) -> Self::Anonymized {
        let original_prefix = self.ptr.prefix_ptr();
        let anonymized = unsafe { core::mem::transmute::<Self, Self::Anonymized>(self) };
        let anonymized_prefix = anonymized.ptr.prefix_ptr();
        assert_eq!(
            anonymized_prefix, original_prefix,
            "The allocation prefix was lost during anonymization; this is definitely a bug, please report it."
        );
        anonymized
    }
}

impl<'a, Vt: HasDropVt, Alloc: IAlloc> From<&'a Dyn<'a, Arc<(), Alloc>, Vt>>
    for Dyn<'a, Weak<(), Alloc>, Vt>
{
    fn from(value: &'a Dyn<'a, Arc<(), Alloc>, Vt>) -> Self {
        Self {
            ptr: ManuallyDrop::new(Arc::downgrade(&value.ptr)),
            vtable: value.vtable,
            unsend: core::marker::PhantomData,
        }
    }
}
impl<'a, Vt: HasDropVt + IStable, Alloc: IAlloc> Dyn<'a, Weak<(), Alloc>, Vt> {
    /// Attempts to upgrade a type-erased weak pointer to a type-erased [`Arc`].
    pub fn upgrade(self) -> crate::option::Option<Dyn<'a, Arc<(), Alloc>, Vt>> {
        let Some(ptr) = self.ptr.upgrade() else {
            return crate::option::Option::None();
        };
        crate::option::Option::Some(Dyn {
            ptr: ManuallyDrop::new(ptr),
            vtable: self.vtable,
            unsend: core::marker::PhantomData,
        })
    }
}

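/// An atomic, nullable cell holding an optional [`Arc`], supporting lock-free
/// load/store/compare-exchange of the contained pointer.
///
/// A minimal usage sketch (`ignore`d, as the import path depends on the
/// enclosing crate):
/// ```ignore
/// let cell: AtomicArc<u32, DefaultAllocator> = AtomicArc::new(Some(Arc::new(1)));
/// let current = cell.load(Ordering::Acquire);
/// assert_eq!(current.as_deref(), Some(&1));
/// cell.store(Some(Arc::new(2)), Ordering::Release);
/// ```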
#[crate::stabby]
pub struct AtomicArc<T, Alloc: IAlloc> {
    ptr: AtomicPtr<T>,
    alloc: core::marker::PhantomData<*const Alloc>,
}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Send for AtomicArc<T, Alloc> {}
unsafe impl<T: Send + Sync, Alloc: IAlloc + Send + Sync> Sync for AtomicArc<T, Alloc> {}

impl<T, Alloc: IAlloc> Drop for AtomicArc<T, Alloc> {
    fn drop(&mut self) {
        // Release the currently stored Arc, if any.
        let ptr = self.ptr.load(Ordering::Relaxed);
        if let Some(ptr) = NonNull::new(ptr) {
            unsafe {
                Arc::<T, Alloc>::from_raw(AllocPtr {
                    ptr,
                    marker: PhantomData,
                })
            };
        }
    }
}

type MaybeArc<T, Alloc> = Option<Arc<T, Alloc>>;
impl<T, Alloc: IAlloc> AtomicArc<T, Alloc> {
    /// Constructs the cell, taking ownership of `value`.
    pub const fn new(value: MaybeArc<T, Alloc>) -> Self {
        Self {
            // Relies on the niche optimization: `None` is represented as null.
            ptr: AtomicPtr::new(unsafe {
                core::mem::transmute::<Option<Arc<T, Alloc>>, *mut T>(value)
            }),
            alloc: PhantomData,
        }
    }
    /// Loads the current value, incrementing its strong count so that both the
    /// cell and the caller hold a valid reference.
    pub fn load(&self, order: Ordering) -> MaybeArc<T, Alloc> {
        let ptr = NonNull::new(self.ptr.load(order))?;
        unsafe {
            Arc::<T, Alloc>::increment_strong_count(ptr.as_ptr());
            Some(Arc::from_raw(AllocPtr {
                ptr,
                marker: PhantomData,
            }))
        }
    }
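    /// Stores `value` with `order` ordering, releasing the value that was
    /// previously stored in the cell.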
    pub fn store(&self, value: MaybeArc<T, Alloc>, order: Ordering) {
        let ptr = value.map_or(core::ptr::null_mut(), |value| Arc::into_raw(value).as_ptr());
        // Swap rather than store, so the previous Arc (if any) is released instead of leaked.
        if let Some(prev) = NonNull::new(self.ptr.swap(ptr, order)) {
            drop(unsafe { Arc::<T, Alloc>::from_raw(AllocPtr { ptr: prev, marker: PhantomData }) });
        }
    }
    /// Checks whether the cell currently points to the same allocation as `current`.
    ///
    /// Returns `Ok(())` on a match; otherwise returns the cell's current value
    /// (with its strong count incremented).
    pub fn is(
        &self,
        current: Option<&Arc<T, Alloc>>,
        order: Ordering,
    ) -> Result<(), MaybeArc<T, Alloc>> {
        let ptr = NonNull::new(self.ptr.load(order));
        match (ptr, current) {
            (None, None) => Ok(()),
            (None, _) => Err(None),
            (Some(ptr), Some(current)) if core::ptr::eq(ptr.as_ptr(), current.ptr.as_ptr()) => {
                Ok(())
            }
            (Some(ptr), _) => unsafe {
                Arc::<T, Alloc>::increment_strong_count(ptr.as_ptr());
                Err(Some(Arc::from_raw(AllocPtr {
                    ptr,
                    marker: PhantomData,
                })))
            },
        }
    }
    /// Stores `new` if the cell currently points to the same allocation as `current`.
    ///
    /// Returns the previously stored value on success, and the observed value
    /// (with its strong count incremented) on failure.
    pub fn compare_exchange(
        &self,
        current: Option<&Arc<T, Alloc>>,
        new: MaybeArc<T, Alloc>,
        success: Ordering,
        failure: Ordering,
    ) -> Result<MaybeArc<T, Alloc>, MaybeArc<T, Alloc>> {
        let current = current.map_or(core::ptr::null_mut(), |value| value.ptr.ptr.as_ptr());
        let new_ptr = new
            .as_ref()
            .map_or(core::ptr::null_mut(), |value| value.ptr.ptr.as_ptr());
        match self.ptr.compare_exchange(current, new_ptr, success, failure) {
            Ok(ptr) => {
                // The cell now owns `new`'s strong count; the old value goes to the caller.
                core::mem::forget(new);
                Ok(NonNull::new(ptr).map(|ptr| unsafe {
                    Arc::from_raw(AllocPtr {
                        ptr,
                        marker: PhantomData,
                    })
                }))
            }
            Err(ptr) => {
                // `new` was not stored; dropping it here keeps its count balanced.
                drop(new);
                Err(NonNull::new(ptr).map(|ptr| unsafe {
                    Arc::<T, Alloc>::increment_strong_count(ptr.as_ptr());
                    Arc::from_raw(AllocPtr {
                        ptr,
                        marker: PhantomData,
                    })
                }))
            }
        }
    }
}

#[cfg(feature = "serde")]
mod serde_impl {
    use super::*;
    use crate::alloc::IAlloc;
    use serde::{Deserialize, Serialize};
    impl<T: Serialize, Alloc: IAlloc> Serialize for ArcSlice<T, Alloc> {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            // Serialize through the plain slice view.
            let slice: &[T] = self;
            slice.serialize(serializer)
        }
    }
    impl<'a, T: Deserialize<'a>, Alloc: IAlloc + Default> Deserialize<'a> for ArcSlice<T, Alloc> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'a>,
        {
            // Deserialize into a Vec, then convert without copying when possible.
            crate::alloc::vec::Vec::deserialize(deserializer).map(Into::into)
        }
    }
    impl<Alloc: IAlloc> Serialize for ArcStr<Alloc> {
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: serde::Serializer,
        {
            let slice: &str = self;
            slice.serialize(serializer)
        }
    }
    impl<'a, Alloc: IAlloc + Default> Deserialize<'a> for ArcStr<Alloc> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: serde::Deserializer<'a>,
        {
            crate::alloc::string::String::deserialize(deserializer).map(Into::into)
        }
    }
}