1use core::mem::MaybeUninit;
8use core::ptr::{addr_of_mut, NonNull};
9use core::sync::atomic;
10
/// A thread-safe reference-counted shared pointer holding only a strong
/// count (no weak references). The heap allocation stores the counter
/// together with the value.
pub(super) struct Arc<T> {
    // `NonNull` provides the niche, so `Option<Arc<T>>` stays pointer-sized.
    // Allocated in `Arc::new`; freed by the last `Drop`.
    ptr: NonNull<ArcInner<T>>,
}
15
// SAFETY: an `Arc<T>` hands out `&T` to multiple threads, so the handle may
// only cross or be shared across threads when `T: Sync`. `T: Send` is also
// required because the *last* handle to drop — possibly on another thread —
// runs `T`'s destructor.
unsafe impl<T: Sync + Send> Send for Arc<T> {}
unsafe impl<T: Sync + Send> Sync for Arc<T> {}
18
/// The heap payload shared by all `Arc` handles pointing at one allocation.
struct ArcInner<T> {
    // Number of live `Arc` handles; the allocation is freed when it drops
    // from 1 to 0 (see the `Drop` impl).
    counter: atomic::AtomicUsize,
    data: T,
}
23
// SAFETY: same reasoning as the impls on `Arc<T>` — the inner allocation is
// reached from multiple threads, so it is `Send`/`Sync` exactly when `T` is
// both `Sync` (shared `&T` access) and `Send` (dropped on an arbitrary
// thread).
unsafe impl<T: Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: Sync + Send> Sync for ArcInner<T> {}
26
27impl<T> Arc<T> {
28 #[inline]
29 pub(super) fn get_mut(this: &mut Self) -> Option<&mut T> {
30 if this.is_unique() {
31 unsafe { Some(Self::get_mut_unchecked(this)) }
34 } else {
35 None
36 }
37 }
38
39 #[inline]
40 pub(super) unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
41 &mut this.ptr.as_mut().data
42 }
43
44 #[inline]
45 fn inner(&self) -> &ArcInner<T> {
46 unsafe { self.ptr.as_ref() }
49 }
50
51 #[inline]
52 fn is_unique(&self) -> bool {
53 self.inner().counter.load(atomic::Ordering::Relaxed) == 1
54 }
55
56 #[inline]
57 pub(super) fn new(data: T) -> Self {
58 let inner = ArcInner { counter: atomic::AtomicUsize::new(1), data };
59
60 let ptr =
63 unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(inner))) };
64
65 Self { ptr }
66 }
67
68 #[inline]
69 pub(super) fn ptr_eq(this: &Self, other: &Self) -> bool {
70 this.ptr == other.ptr
71 }
72}
73
74impl<T: Clone> Arc<T> {
75 #[inline]
76 pub(super) fn make_mut(this: &mut Self) -> &mut T {
77 if !this.is_unique() {
78 *this = this.optimized_clone();
79 }
80
81 unsafe { Self::get_mut_unchecked(this) }
83 }
84
85 #[inline]
86 fn optimized_clone(&self) -> Self {
87 let mut buffer: Box<MaybeUninit<ArcInner<T>>> =
90 Box::new(MaybeUninit::uninit());
91
92 let ptr = unsafe {
93 let ptr = buffer.as_mut_ptr();
94 addr_of_mut!((*ptr).data).write(T::clone(self));
97 (*ptr).counter = atomic::AtomicUsize::new(1);
98 NonNull::new_unchecked(Box::into_raw(buffer) as *mut ArcInner<T>)
99 };
100
101 Arc { ptr }
102 }
103}
104
105impl<T: core::fmt::Debug> core::fmt::Debug for Arc<T> {
106 #[inline]
107 fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
108 core::fmt::Debug::fmt(&**self, f)
109 }
110}
111
112impl<T: Default> Default for Arc<T> {
113 #[inline]
114 fn default() -> Self {
115 Self::new(T::default())
116 }
117}
118
impl<T> Clone for Arc<T> {
    #[inline]
    fn clone(&self) -> Self {
        // Relaxed is sufficient for the increment: a new handle can only be
        // created from an existing live one, so no ordering with other
        // threads is required here (same scheme as the standard Arc).
        let old = self.inner().counter.fetch_add(1, atomic::Ordering::Relaxed);

        // Guard against counter overflow (e.g. `mem::forget` on clones in a
        // loop). The threshold leaves headroom so racing increments from
        // other threads cannot wrap the counter before we recover.
        if unlikely(old > isize::MAX as usize) {
            // Undo our own increment before panicking: this temporary handle's
            // `Drop` performs the matching `fetch_sub`. The count is far above
            // 1 here, so the allocation is not freed by it.
            drop(Self { ptr: self.ptr });
            panic!("Arc counter overflow");
        }

        Self { ptr: self.ptr }
    }
}
134
135impl<T> core::ops::Deref for Arc<T> {
136 type Target = T;
137
138 #[inline]
139 fn deref(&self) -> &Self::Target {
140 &self.inner().data
141 }
142}
143
impl<T> Drop for Arc<T> {
    #[inline]
    fn drop(&mut self) {
        // Release publishes every access this thread made through the handle
        // to whichever thread ends up freeing the allocation.
        let old = self.inner().counter.fetch_sub(1, atomic::Ordering::Release);

        if old == 1 {
            // We were the last handle. This Acquire fence pairs with the
            // Release decrements performed by every other handle's drop,
            // ordering all of their accesses before the deallocation below
            // (the classic Release-decrement / Acquire-fence Arc pattern).
            atomic::fence(atomic::Ordering::Acquire);

            // SAFETY: the count hit zero, so no other handle exists;
            // reconstructing the Box drops the data and frees the allocation
            // exactly once.
            let _ = unsafe { Box::from_raw(self.ptr.as_ptr()) };
        }
    }
}
158
159use predictions::*;
160
mod predictions {
    /// Branch-prediction hint: returns `b` unchanged while marking the
    /// `true` path as unlikely for the optimizer.
    #[inline(always)]
    pub(super) fn unlikely(b: bool) -> bool {
        if b {
            cold();
        }
        b
    }

    // The hint works because a *call* to a `#[cold]` function marks the
    // enclosing branch as rarely taken. The previous `#[inline(always)]` on
    // this function forced that call to be inlined away, erasing the very
    // call the hint relies on — so `cold` is deliberately left un-inlined.
    #[cold]
    fn cold() {}
}