1use std::fmt::{Debug, Display};
2use std::ops::{Deref, DerefMut};
3use std::ptr::{drop_in_place, null_mut, NonNull};
4use std::sync::atomic::{AtomicUsize, Ordering};
5
/// A deliberately unsound `Arc` look-alike: it hands out `&mut T` through
/// shared handles (`DerefMut`, `WeakIllegal::get_mut`) with no runtime
/// aliasing checks. Reference counting is done with `AtomicUsize` (SeqCst).
pub struct ArcIllegal<T> {
    // Shared control block holding both counters and the value pointer;
    // allocated in `ArcIllegal::new` and shared by every strong/weak handle.
    inner: NonNull<ArcIllegalInner<T>>,
}
45
// SAFETY NOTE(review): these impls are unconditional — they do NOT require
// `T: Send` / `T: Sync`, so moving a handle to a non-Send `T` across threads
// is unsound. Presumably intentional given the type's name; confirm before
// reusing this type elsewhere.
unsafe impl<T> Send for ArcIllegal<T> {}
unsafe impl<T> Sync for ArcIllegal<T> {}
48
49impl<T: Debug> Debug for ArcIllegal<T> {
50 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
51 T::fmt(&*self, f)
52 }
53}
54
55impl<T: Display> Display for ArcIllegal<T> {
56 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
57 T::fmt(&*self, f)
58 }
59}
60
/// Shared control block behind every `ArcIllegal` / `WeakIllegal` handle.
struct ArcIllegalInner<T: ?Sized> {
    // Number of strong (`ArcIllegal`) handles; starts at 1 in `new`.
    refs: AtomicUsize,
    // Number of weak (`WeakIllegal`) handles; starts at 0.
    weak_refs: AtomicUsize,
    // Heap pointer to the value (created via `Box::leak` in `new`); set to
    // null once the value has been dropped or extracted.
    value: *mut T,
}
66
/// Convenience constructor: `arc(v)` is shorthand for `ArcIllegal::new(v)`.
#[inline]
pub fn arc<T>(inner: T) -> ArcIllegal<T> {
    ArcIllegal::new(inner)
}
72
73impl<T> ArcIllegal<T> {
74 #[inline]
76 pub fn new(inner: T) -> ArcIllegal<T> {
77 let inner_arc = Box::new(ArcIllegalInner {
78 refs: AtomicUsize::new(1),
79 weak_refs: AtomicUsize::new(0),
80 value: Box::leak(Box::new(inner)),
81 });
82
83 unsafe {
84 ArcIllegal {
85 inner: NonNull::new_unchecked(Box::leak(inner_arc)),
86 }
87 }
88 }
89
90 pub fn as_ptr(&self) -> *mut T {
92 unsafe { (*self.inner.as_ptr()).value }
93 }
94
95 pub fn ref_count(&self) -> usize {
97 self.strong_ref_count() + self.weak_ref_count()
98 }
99
100 pub fn weak_ref_count(&self) -> usize {
102 unsafe {
103 let inner = &*self.inner.as_ptr();
104 inner.weak_refs.load(Ordering::SeqCst)
105 }
106 }
107
108 pub fn strong_ref_count(&self) -> usize {
110 unsafe {
111 let inner = &*self.inner.as_ptr();
112 inner.refs.load(Ordering::SeqCst)
113 }
114 }
115
116 pub fn dup(&self) -> Self {
119 ArcIllegal::clone(&self)
120 }
121
122 pub fn weak(&self) -> WeakIllegal<T> {
124 unsafe {
125 (&mut *self.inner.as_ptr())
126 .weak_refs
127 .fetch_add(1, Ordering::SeqCst);
128
129 WeakIllegal {
130 inner: NonNull::new_unchecked(self.inner.as_ptr()),
131 }
132 }
133 }
134
135 pub fn dismantle(self) -> Result<T, Self> {
137 unsafe {
138 if 1 != self.ref_count() {
139 return Err(self);
140 }
141 (&mut *self.inner.as_ptr())
142 .refs
143 .fetch_sub(1, Ordering::SeqCst);
144
145 let inner = self.inner.as_ptr();
146 let obj = (*inner).value.read();
147 drop_in_place(inner);
148
149 Ok(obj)
150 }
151 }
152
153 pub fn dismantle_with_weak(self) -> Result<T, Self> {
154 unsafe {
155 if 1 != self.strong_ref_count() {
156 return Err(self);
157 }
158
159 (&mut *self.inner.as_ptr())
160 .refs
161 .fetch_sub(1, Ordering::SeqCst);
162
163 let obj = (*self.inner.as_ptr()).value.read();
164 (*self.inner.as_ptr()).value = null_mut();
165 Ok(obj)
166 }
167 }
168}
169
170impl<T> Drop for ArcIllegal<T> {
171 fn drop(&mut self) {
172 unsafe {
173 let prev = (&mut *self.inner.as_ptr())
174 .refs
175 .fetch_sub(1, Ordering::SeqCst);
176 if 1 != prev {
177 return;
178 }
179
180 if (&mut *self.inner.as_ptr()).refs.load(Ordering::SeqCst) == 0 {
181 drop_in_place((*self.inner.as_ptr()).value);
182 (*self.inner.as_ptr()).value = null_mut();
183 }
184
185 if (&mut *self.inner.as_ptr()).weak_refs.load(Ordering::SeqCst) == 0 {
186 drop_in_place(self.inner.as_ptr());
187 }
188 }
189 }
190}
191
192impl<T> Clone for ArcIllegal<T> {
193 fn clone(&self) -> Self {
194 unsafe {
195 (&mut *self.inner.as_ptr())
196 .refs
197 .fetch_add(1, Ordering::SeqCst);
198
199 ArcIllegal {
200 inner: NonNull::new_unchecked(self.inner.as_ptr()),
201 }
202 }
203 }
204}
205
206impl<T> Deref for ArcIllegal<T> {
207 type Target = T;
208
209 fn deref(&self) -> &T {
210 unsafe { &*(*self.inner.as_ptr()).value }
211 }
212}
213
214impl<T> DerefMut for ArcIllegal<T> {
215 fn deref_mut(&mut self) -> &mut T {
216 unsafe { &mut *(*self.inner.as_ptr()).value }
217 }
218}
219
/// Weak counterpart of `ArcIllegal`: keeps the control block alive but not
/// the value.
pub struct WeakIllegal<T> {
    // Same control block as the strong handles that created this weak ref.
    inner: NonNull<ArcIllegalInner<T>>,
}
224
// SAFETY NOTE(review): unconditional, like the strong handle's impls — no
// `T: Send`/`T: Sync` bound, so this is unsound for non-thread-safe `T`.
unsafe impl<T> Sync for WeakIllegal<T> {}
unsafe impl<T> Send for WeakIllegal<T> {}
227
228impl<T> WeakIllegal<T> {
229 pub fn get(&self) -> Option<&T> {
231 if unsafe { self.inner.as_ref() }.refs.load(Ordering::SeqCst) == 0 {
232 None
233 } else {
234 Some(unsafe { &*self.inner.as_ref().value })
235 }
236 }
237
238 pub fn get_mut(&self) -> Option<&mut T> {
240 if unsafe { self.inner.as_ref() }.refs.load(Ordering::SeqCst) == 0 {
241 None
242 } else {
243 Some(unsafe { &mut *self.inner.as_ref().value })
244 }
245 }
246
247 pub fn strong(&self) -> Option<ArcIllegal<T>> {
249 if unsafe { self.inner.as_ref() }.refs.load(Ordering::SeqCst) == 0 {
250 None
251 } else {
252 unsafe { self.inner.as_ref() }
253 .refs
254 .fetch_add(1, Ordering::SeqCst);
255
256 Some(ArcIllegal {
257 inner: unsafe { NonNull::new_unchecked(self.inner.as_ptr()) },
258 })
259 }
260 }
261
262 pub fn upgrade(&self) -> Option<ArcIllegal<T>> {
264 self.strong()
265 }
266
267 pub fn ref_count(&self) -> usize {
269 self.strong_ref_count() + self.weak_ref_count()
270 }
271
272 pub fn weak_ref_count(&self) -> usize {
274 unsafe {
275 let inner = &*self.inner.as_ptr();
276 inner.weak_refs.load(Ordering::SeqCst)
277 }
278 }
279
280 pub fn strong_ref_count(&self) -> usize {
282 unsafe {
283 let inner = &*self.inner.as_ptr();
284 inner.refs.load(Ordering::SeqCst)
285 }
286 }
287}
288
289impl<T> Clone for WeakIllegal<T> {
290 fn clone(&self) -> Self {
291 unsafe {
292 (&mut *self.inner.as_ptr())
293 .weak_refs
294 .fetch_add(1, Ordering::SeqCst);
295
296 WeakIllegal {
297 inner: NonNull::new_unchecked(self.inner.as_ptr()),
298 }
299 }
300 }
301}
302
303impl<T> Drop for WeakIllegal<T> {
304 fn drop(&mut self) {
305 unsafe {
306 let prev = (&mut *self.inner.as_ptr())
307 .weak_refs
308 .fetch_sub(1, Ordering::SeqCst);
309 if 1 != prev {
310 return;
311 }
312
313 if (&mut *self.inner.as_ptr()).refs.load(Ordering::SeqCst) != 0 {
314 return;
315 }
316
317 if (&mut *self.inner.as_ptr()).weak_refs.load(Ordering::SeqCst) == 0 {
318 drop_in_place(self.inner.as_ptr());
319 }
320 }
321 }
322}
323
#[cfg(test)]
mod tests {
    use crate::arc;

    // Weak handles observe the value while any strong handle lives, and
    // turn to `None` once the last strong handle (including upgrades) is
    // dropped.
    #[test]
    pub fn weak_works() {
        let aa = &[0u8; 4];
        let x = arc(&aa[..]);
        let y = x.weak();
        let z = y.strong().unwrap();
        assert_eq!(&[0, 0, 0, 0][..], *x);
        assert_eq!(Some(&[0, 0, 0, 0][..]), y.get().copied());
        drop(x);
        // `z` still holds a strong ref, so the weak handle stays live.
        assert_eq!(Some(&[0, 0, 0, 0][..]), y.get().copied());
        drop(z);
        assert_eq!(None, y.get().copied());
        assert_eq!(None, y.strong().map(|_| true));
    }

    // `dismantle` refuses while more than one handle exists, and yields
    // the value once the count drops to one.
    #[test]
    pub fn dismantle_works() {
        let input = {
            let input = arc(vec![1u8; 5]);
            let clone = input.dup();
            let _ = match clone.dismantle() {
                Err(clone) => clone,
                Ok(_) => panic!("Shouldn't be able to dismantle ArcIllegal with 2 references"),
            };

            input
        };

        assert_eq!(1, input.ref_count());
        assert_eq!(Some(vec![1u8; 5]), input.dismantle().ok());
    }

    // `dismantle_with_weak` extracts the value despite an outstanding weak
    // handle; afterwards the weak handle must see a nulled value and
    // report `None`.
    #[test]
    pub fn dismantle_weak_works() {
        let (input, weak) = {
            let input = arc(vec![1u8; 5]);
            let clone = input.dup();
            let weak = input.weak();
            let _ = match clone.dismantle_with_weak() {
                Err(clone) => clone,
                Ok(_) => panic!("Shouldn't be able to dismantle ArcIllegal with 2 references"),
            };

            (input, weak)
        };

        // One strong + one weak handle remain at this point.
        assert_eq!(2, input.ref_count());
        let res = input.dismantle_with_weak().ok();
        assert_eq!(Some(vec![1u8; 5]), res);
        std::mem::drop(res.unwrap());
        unsafe {
            assert!((*weak.inner.as_ptr()).value.is_null());
        }
        assert_eq!(None, weak.get());
    }
}
383}