1use std::{
3 alloc::{self, Layout},
4 cmp::Ordering,
5 hash::{Hash, Hasher},
6 marker::PhantomData,
7 mem::{self, offset_of, ManuallyDrop},
8 ops::Deref,
9 ptr::{self, NonNull},
10 sync::atomic::{
11 self,
12 Ordering::{Acquire, Relaxed, Release},
13 },
14};
15
/// Refcounts above this trigger an abort in `Clone`. Capping the live count
/// at `isize::MAX` ensures `fetch_add` can never wrap the counter to zero,
/// even if many threads race past the check before the process aborts.
const MAX_REFCOUNT: usize = (isize::MAX) as usize;
21
/// Heap payload of an [`Arc`]: the atomic reference count followed by the data.
///
/// `#[repr(C)]` fixes the field order so the offset of `data` is stable and
/// can be computed with `offset_of!` (relied on by `Arc::from_raw`).
#[repr(C)]
pub(crate) struct ArcInner<T: ?Sized> {
    pub(crate) count: atomic::AtomicUsize,
    pub(crate) data: T,
}
28
// SAFETY: sharing or sending an `ArcInner` across threads exposes `&T` to
// multiple threads and may drop `T` on a different thread than created it,
// so `T` must be both `Send` and `Sync`.
unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
31
/// A minimal atomically reference-counted pointer, analogous to
/// `std::sync::Arc` but without weak references.
///
/// `#[repr(transparent)]` guarantees `Arc<T>` has exactly the layout of the
/// inner `NonNull` pointer.
#[repr(transparent)]
pub(crate) struct Arc<T: ?Sized> {
    // Pointer to the shared, heap-allocated `ArcInner`.
    pub(crate) p: ptr::NonNull<ArcInner<T>>,
    // Marks conceptual ownership of a `T` for variance/drop-check purposes.
    pub(crate) phantom: PhantomData<T>,
}
43
// SAFETY: an `Arc<T>` hands out `&T` on any thread holding a clone and drops
// `T` on whichever thread releases the last reference, so `T: Send + Sync`
// is required — the same bounds as `std::sync::Arc`.
unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
46
impl<T> Arc<T> {
    /// Reconstructs an `Arc<T>` from a raw payload pointer previously
    /// produced by [`Arc::into_raw`].
    ///
    /// # Safety
    ///
    /// `ptr` must have come from `into_raw`, and each such pointer may be
    /// converted back at most once: the call takes ownership of one strong
    /// reference.
    #[inline]
    pub(crate) unsafe fn from_raw(ptr: *const T) -> Self {
        // Walk back from the `data` field to the start of the `ArcInner`.
        // Valid because `ArcInner` is `#[repr(C)]`, fixing the field layout.
        let ptr = (ptr as *const u8).sub(offset_of!(ArcInner<T>, data));
        Arc {
            p: ptr::NonNull::new_unchecked(ptr as *mut ArcInner<T>),
            phantom: PhantomData,
        }
    }
}
65
impl<T: ?Sized> Arc<T> {
    /// Shared access to the heap block (count + data).
    #[inline]
    fn inner(&self) -> &ArcInner<T> {
        // SAFETY: `self.p` is non-null and points at a live `ArcInner` for
        // as long as any `Arc` handle to it exists.
        unsafe { &*self.ptr() }
    }

    /// Frees the allocation once the count has reached zero.
    ///
    /// Kept out-of-line (`#[inline(never)]`) so the common, non-final drop
    /// path stays small and inlinable.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        let _ = Box::from_raw(self.ptr());
    }

    /// Pointer identity: true iff both handles share one allocation.
    #[inline]
    pub(crate) fn ptr_eq(this: &Self, other: &Self) -> bool {
        // `addr_eq` compares addresses only, ignoring any fat-pointer
        // metadata (relevant for unsized `T`).
        std::ptr::addr_eq(this.ptr(), other.ptr())
    }

    pub(crate) fn ptr(&self) -> *mut ArcInner<T> {
        self.p.as_ptr()
    }

    /// Consumes the handle and returns a pointer to the payload *without*
    /// decrementing the count; pair with [`Arc::from_raw`] to avoid a leak.
    #[inline]
    pub(crate) fn into_raw(self) -> NonNull<T> {
        let ptr = NonNull::from(&self.inner().data);
        mem::forget(self);
        ptr
    }
}
101
impl<T: ?Sized> Clone for Arc<T> {
    #[inline]
    fn clone(&self) -> Self {
        // Relaxed suffices: the new reference is derived from an existing
        // one, so no synchronization with operations on the data is needed
        // (same reasoning as `std::sync::Arc::clone`).
        let old_size = self.inner().count.fetch_add(1, Relaxed);

        // Abort (not panic) once the count exceeds `isize::MAX`: aborting
        // bounds how far racing increments can push the counter before the
        // process dies, ruling out overflow back to zero and a
        // use-after-free.
        if old_size > MAX_REFCOUNT {
            std::process::abort();
        }

        // SAFETY: `self.ptr()` originates from a `NonNull`.
        unsafe {
            Arc {
                p: ptr::NonNull::new_unchecked(self.ptr()),
                phantom: PhantomData,
            }
        }
    }
}
139
140impl<T: ?Sized> Deref for Arc<T> {
141 type Target = T;
142
143 #[inline]
144 fn deref(&self) -> &T {
145 &self.inner().data
146 }
147}
148
impl<T: ?Sized> Arc<T> {
    /// Mutable access to the payload, available only while `this` is the
    /// sole strong reference; otherwise returns `None`.
    #[inline]
    pub(crate) fn get_mut(this: &mut Self) -> Option<&mut T> {
        if this.is_unique() {
            // SAFETY: uniqueness was just verified and we hold `&mut Self`,
            // so no other handle can observe the data concurrently.
            unsafe {
                Some(&mut (*this.ptr()).data)
            }
        } else {
            None
        }
    }

    /// Whether this is the only strong reference to the allocation.
    pub(crate) fn is_unique(&self) -> bool {
        // Acquire pairs with the Release decrement in `Drop`, so writes made
        // through a handle dropped on another thread are visible before we
        // conclude we are unique.
        self.inner().count.load(Acquire) == 1
    }
}
171
impl<T: ?Sized> Drop for Arc<T> {
    #[inline]
    fn drop(&mut self) {
        // Release ensures all uses of the data through this handle
        // happen-before the decrement becomes visible to other threads.
        if self.inner().count.fetch_sub(1, Release) != 1 {
            return;
        }

        // This Acquire load pairs with the Release decrements above, making
        // every other thread's accesses to the data visible to the thread
        // that deallocates — the same fence pattern `std::sync::Arc` uses.
        self.inner().count.load(Acquire);

        // SAFETY: the count just reached zero, so this is the last handle.
        unsafe {
            self.drop_slow();
        }
    }
}
208
209impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
210 fn eq(&self, other: &Arc<T>) -> bool {
211 Self::ptr_eq(self, other) || *(*self) == *(*other)
212 }
213}
214
215impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
216 fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
217 (**self).partial_cmp(&**other)
218 }
219
220 fn lt(&self, other: &Arc<T>) -> bool {
221 *(*self) < *(*other)
222 }
223
224 fn le(&self, other: &Arc<T>) -> bool {
225 *(*self) <= *(*other)
226 }
227
228 fn gt(&self, other: &Arc<T>) -> bool {
229 *(*self) > *(*other)
230 }
231
232 fn ge(&self, other: &Arc<T>) -> bool {
233 *(*self) >= *(*other)
234 }
235}
236
237impl<T: ?Sized + Ord> Ord for Arc<T> {
238 fn cmp(&self, other: &Arc<T>) -> Ordering {
239 (**self).cmp(&**other)
240 }
241}
242
// `Eq` is sound because equality fully delegates to `T`, whose `Eq` bound
// guarantees reflexivity (making the pointer-equality fast path valid too).
impl<T: ?Sized + Eq> Eq for Arc<T> {}
244
245impl<T: ?Sized + Hash> Hash for Arc<T> {
246 fn hash<H: Hasher>(&self, state: &mut H) {
247 (**self).hash(state)
248 }
249}
250
/// A header `H` followed by an inline slice of `T`, in a single allocation.
///
/// `#[repr(C)]` pins the field order: `header`, then `length`, then the
/// (possibly unsized) `slice` payload. `ThinArc` depends on `length` being
/// readable through a `[T; 0]`-typed view of the same memory.
#[derive(Debug, Eq, PartialEq, Hash, PartialOrd)]
#[repr(C)]
pub(crate) struct HeaderSlice<H, T: ?Sized> {
    pub(crate) header: H,
    // Invariant: always equals the element count of `slice`.
    length: usize,
    slice: T,
}
258
259impl<H, T> HeaderSlice<H, [T]> {
260 pub(crate) fn slice(&self) -> &[T] {
261 &self.slice
262 }
263
264 pub(crate) fn len(&self) -> usize {
266 self.length
267 }
268}
269
impl<H, T> Deref for HeaderSlice<H, [T; 0]> {
    type Target = HeaderSlice<H, [T]>;

    /// Reattaches the runtime length to a `[T; 0]`-typed header-slice,
    /// producing the unsized `HeaderSlice<H, [T]>` view.
    fn deref(&self) -> &Self::Target {
        unsafe {
            let len = self.length;
            // Build a fat pointer whose address is `self` and whose metadata
            // is `len`, then reinterpret it as the unsized HeaderSlice. Only
            // the (address, len) pair matters; the `*const T` element type of
            // the intermediate slice pointer is incidental.
            let fake_slice: *const [T] =
                ptr::slice_from_raw_parts(self as *const _ as *const T, len);
            &*(fake_slice as *const HeaderSlice<H, [T]>)
        }
    }
}
282
/// A "thin" `Arc<HeaderSlice<H, [T]>>`: one machine word wide.
///
/// The slice length is stored inside the allocation (`HeaderSlice::length`)
/// instead of in fat-pointer metadata, so the handle itself stays thin.
/// `#[repr(transparent)]` makes it layout-identical to the raw pointer.
#[repr(transparent)]
pub(crate) struct ThinArc<H, T> {
    ptr: ptr::NonNull<ArcInner<HeaderSlice<H, [T; 0]>>>,
    phantom: PhantomData<(H, T)>,
}
303
// SAFETY: a `ThinArc` behaves exactly like `Arc<HeaderSlice<H, [T]>>`, so it
// needs the same `Send + Sync` bounds on everything it stores.
unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}
306
/// Expands a thin pointer into the corresponding fat pointer by reading the
/// stored length out of the allocation and attaching it as slice metadata.
///
/// NOTE(review): although this fn is safe to call, it dereferences `thin`,
/// so callers must pass a pointer to a live, initialized allocation.
fn thin_to_thick<H, T>(
    thin: *mut ArcInner<HeaderSlice<H, [T; 0]>>,
) -> *mut ArcInner<HeaderSlice<H, [T]>> {
    let len = unsafe { (*thin).data.length };
    // Same address; only the metadata (`len`) is attached to the pointer.
    let fake_slice: *mut [T] = ptr::slice_from_raw_parts_mut(thin as *mut T, len);
    fake_slice as *mut ArcInner<HeaderSlice<H, [T]>>
}
316
317impl<H, T> ThinArc<H, T> {
318 #[inline]
321 pub(crate) fn with_arc<F, U>(&self, f: F) -> U
322 where
323 F: FnOnce(&Arc<HeaderSlice<H, [T]>>) -> U,
324 {
325 let transient = unsafe {
327 ManuallyDrop::new(Arc {
328 p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
329 phantom: PhantomData,
330 })
331 };
332
333 f(&transient)
336 }
337
338 pub(crate) fn from_header_and_iter<I>(header: H, mut items: I) -> Self
341 where
342 I: Iterator<Item = T> + ExactSizeIterator,
343 {
344 assert_ne!(mem::size_of::<T>(), 0, "Need to think about ZST");
345
346 let num_items = items.len();
347
348 let inner_to_data_offset = offset_of!(ArcInner<HeaderSlice<H, [T; 0]>>, data);
350 let data_to_slice_offset = offset_of!(HeaderSlice<H, [T; 0]>, slice);
351 let slice_offset = inner_to_data_offset + data_to_slice_offset;
352
353 let slice_size = mem::size_of::<T>()
355 .checked_mul(num_items)
356 .expect("size overflows");
357 let usable_size = slice_offset
358 .checked_add(slice_size)
359 .expect("size overflows");
360
361 let align = mem::align_of::<ArcInner<HeaderSlice<H, [T; 0]>>>();
363 let size = usable_size.wrapping_add(align - 1) & !(align - 1);
364 assert!(size >= usable_size, "size overflows");
365 let layout = Layout::from_size_align(size, align).expect("invalid layout");
366
367 let ptr: *mut ArcInner<HeaderSlice<H, [T; 0]>>;
368 unsafe {
369 let buffer = alloc::alloc(layout);
370
371 if buffer.is_null() {
372 alloc::handle_alloc_error(layout);
373 }
374
375 ptr = buffer as *mut _;
384
385 let count = atomic::AtomicUsize::new(1);
386
387 ptr::write(ptr::addr_of_mut!((*ptr).count), count);
392 ptr::write(ptr::addr_of_mut!((*ptr).data.header), header);
393 ptr::write(ptr::addr_of_mut!((*ptr).data.length), num_items);
394 if num_items != 0 {
395 let mut current = ptr::addr_of_mut!((*ptr).data.slice) as *mut T;
396 debug_assert_eq!(current as usize - buffer as usize, slice_offset);
397 for _ in 0..num_items {
398 ptr::write(
399 current,
400 items
401 .next()
402 .expect("ExactSizeIterator over-reported length"),
403 );
404 current = current.offset(1);
405 }
406 assert!(
407 items.next().is_none(),
408 "ExactSizeIterator under-reported length"
409 );
410
411 debug_assert_eq!(current as *mut u8, buffer.add(usable_size));
413 }
414 assert!(
415 items.next().is_none(),
416 "ExactSizeIterator under-reported length"
417 );
418 }
419
420 ThinArc {
421 ptr: unsafe { ptr::NonNull::new_unchecked(ptr) },
422 phantom: PhantomData,
423 }
424 }
425}
426
impl<H, T> Deref for ThinArc<H, T> {
    type Target = HeaderSlice<H, [T]>;

    /// Borrows the header-slice payload, reattaching the stored length as
    /// slice metadata on the way.
    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: `self.ptr` stays live for as long as `self` exists, and
        // `thin_to_thick` attaches the correct length metadata.
        unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
    }
}
435
436impl<H, T> Clone for ThinArc<H, T> {
437 #[inline]
438 fn clone(&self) -> Self {
439 ThinArc::with_arc(self, |a| Arc::into_thin(a.clone()))
440 }
441}
442
impl<H, T> Drop for ThinArc<H, T> {
    #[inline]
    fn drop(&mut self) {
        // Rebuild a fat `Arc` from this handle and let *its* `Drop` perform
        // the decrement-and-maybe-free. The temporary `ThinArc` built here
        // is consumed by `from_thin` (which forgets it), so the count is
        // decremented exactly once and no double drop occurs.
        let _ = Arc::from_thin(ThinArc {
            ptr: self.ptr,
            phantom: PhantomData,
        });
    }
}
452
impl<H, T> Arc<HeaderSlice<H, [T]>> {
    /// Converts a fat `Arc` into a thin one, discarding the pointer's
    /// length metadata (it is recoverable from the allocation itself).
    #[inline]
    pub(crate) fn into_thin(a: Self) -> ThinArc<H, T> {
        assert_eq!(
            a.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        let fat_ptr: *mut ArcInner<HeaderSlice<H, [T]>> = a.ptr();
        // Ownership of the strong reference transfers to the returned
        // ThinArc; don't let `a`'s Drop decrement the count.
        mem::forget(a);
        // Cast through `*mut [usize]` to strip the slice metadata while
        // keeping the address.
        let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
        ThinArc {
            // SAFETY: `fat_ptr` originated from a `NonNull`.
            ptr: unsafe {
                ptr::NonNull::new_unchecked(thin_ptr as *mut ArcInner<HeaderSlice<H, [T; 0]>>)
            },
            phantom: PhantomData,
        }
    }

    /// Converts a thin `Arc` back into a fat one by re-reading the length
    /// from the allocation; inverse of [`Arc::into_thin`].
    #[inline]
    pub(crate) fn from_thin(a: ThinArc<H, T>) -> Self {
        let ptr = thin_to_thick(a.ptr.as_ptr());
        // Ownership transfers to the returned Arc; skip ThinArc's Drop so
        // the count isn't decremented here.
        mem::forget(a);
        unsafe {
            Arc {
                p: ptr::NonNull::new_unchecked(ptr),
                phantom: PhantomData,
            }
        }
    }
}
488
489impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
490 #[inline]
491 fn eq(&self, other: &ThinArc<H, T>) -> bool {
492 **self == **other
493 }
494}
495
// Equality delegates to `HeaderSlice`, whose derived `Eq` is reflexive.
impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}
497
498impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
499 fn hash<HSR: Hasher>(&self, state: &mut HSR) {
500 (**self).hash(state)
501 }
502}