// arc_slice/slice.rs

1use alloc::{borrow::Cow, boxed::Box, vec::Vec};
2use core::{
3    any::Any,
4    borrow::Borrow,
5    cmp, fmt,
6    hash::{Hash, Hasher},
7    hint, mem,
8    mem::{ManuallyDrop, MaybeUninit},
9    ops::{Deref, RangeBounds},
10    ptr::NonNull,
11};
12
13#[allow(unused_imports)]
14use crate::msrv::{NonNullExt, StrictProvenance};
15use crate::{
16    arc::{unit_metadata, Arc},
17    buffer::{BorrowMetadata, Buffer, BufferMutExt},
18    layout::{Compact, Layout, Plain},
19    loom::{
20        atomic_ptr_with_mut,
21        sync::atomic::{AtomicPtr, Ordering},
22    },
23    macros::is,
24    msrv::{ptr, NonZero, SubPtrExt},
25    utils::{
26        debug_slice, offset_len, offset_len_subslice, offset_len_subslice_unchecked,
27        panic_out_of_range,
28    },
29    ArcSliceMut,
30};
31
/// Layout-dependent behavior for [`ArcSlice`]: controls whether the original
/// allocation base pointer of a vector-backed slice is stored alongside it.
pub trait ArcSliceLayout {
    /// Storage for the (possibly elided) base pointer of a vector-backed slice.
    type Base: Copy + 'static;
    /// Whether a vector-backed slice can be truncated in place without first
    /// being promoted to an `Arc` (requires the base pointer to be recoverable).
    const TRUNCATABLE: bool;
    /// Computes the base value to store for a vector whose allocation starts at
    /// `base`; `full` is true when the slice spans the whole capacity.
    /// Returning `None` forces the caller to fall back to an `Arc` buffer.
    fn get_base<T>(full: bool, base: *mut T) -> Option<Self::Base>;
    /// Recovers the allocation base pointer from the stored base, if any.
    fn base_into_ptr<T>(base: Self::Base) -> Option<NonNull<T>>;
}
38
39impl ArcSliceLayout for Compact {
40    type Base = ();
41    const TRUNCATABLE: bool = false;
42    fn get_base<T>(full: bool, _base: *mut T) -> Option<Self::Base> {
43        full.then_some(())
44    }
45    fn base_into_ptr<T>(_base: Self::Base) -> Option<NonNull<T>> {
46        None
47    }
48}
49
50impl ArcSliceLayout for Plain {
51    type Base = NonNull<()>;
52    const TRUNCATABLE: bool = true;
53    fn get_base<T>(_full: bool, base: *mut T) -> Option<Self::Base> {
54        Some(NonNull::new(base).unwrap().cast())
55    }
56    fn base_into_ptr<T>(base: Self::Base) -> Option<NonNull<T>> {
57        Some(base.cast())
58    }
59}
60
/// A cheaply cloneable, sliceable shared slice.
///
/// Internally backed by one of three representations, encoded in
/// `arc_or_capa`: a `'static` slice (null), an exclusive `Vec` (tagged
/// capacity), or a shared `Arc` buffer (plain pointer).
#[repr(C)]
pub struct ArcSlice<T: Send + Sync + 'static, L: Layout = Compact> {
    // NOTE(review): `length` placement flips with endianness under `#[repr(C)]`
    // — presumably a deliberate layout/niche optimization; confirm rationale.
    #[cfg(target_endian = "big")]
    length: usize,
    // Null => static slice; low bit set (VEC_FLAG) => tagged vec capacity;
    // otherwise a pointer to the shared `Arc` allocation.
    arc_or_capa: AtomicPtr<()>,
    // Layout-dependent base pointer; only initialized for the `Vec` case.
    base: MaybeUninit<<L as ArcSliceLayout>::Base>,
    // Start of the visible slice (may point into the middle of the buffer).
    start: NonNull<T>,
    #[cfg(target_endian = "little")]
    length: usize,
}
71
// Tag bit marking `arc_or_capa` as an inline vector capacity rather than an
// `Arc` pointer (the arc pointer is assumed to never have bit 0 set).
const VEC_FLAG: usize = 1;
// Shift applied to recover the capacity from the tagged `arc_or_capa` value.
const VEC_CAPA_SHIFT: usize = 1;
74
/// Decoded representation of `ArcSlice::arc_or_capa`.
enum Inner<T> {
    /// Backed by a `&'static` slice (null pointer).
    Static,
    /// Backed by an exclusive `Vec` allocation of the given (nonzero) capacity.
    Vec { capacity: NonZero<usize> },
    /// Backed by a shared `Arc` buffer; wrapped in `ManuallyDrop` because the
    /// refcount is managed explicitly by each call site.
    Arc(ManuallyDrop<Arc<T>>),
}
80
impl<T: Send + Sync + 'static, L: Layout> ArcSlice<T, L> {
    /// Creates an `ArcSlice` from any buffer, attaching no metadata.
    #[inline]
    pub fn new<B: Buffer<T>>(buffer: B) -> Self {
        Self::with_metadata(buffer, ())
    }

    /// Creates an `ArcSlice` over a `'static` slice without allocating.
    #[cfg(not(all(loom, test)))]
    #[inline]
    pub const fn new_static(slice: &'static [T]) -> Self {
        Self {
            // A null pointer encodes the `Inner::Static` representation.
            arc_or_capa: AtomicPtr::new(ptr::null_mut()),
            base: MaybeUninit::uninit(),
            // SAFETY: slice pointers are never null.
            start: unsafe { NonNull::new_unchecked(slice.as_ptr().cast_mut()) },
            length: slice.len(),
        }
    }

    /// Loom-test variant of [`Self::new_static`] (loom atomics are not `const`).
    #[cfg(all(loom, test))]
    pub fn new_static(slice: &'static [T]) -> Self {
        Self {
            arc_or_capa: AtomicPtr::new(ptr::null_mut()),
            base: MaybeUninit::uninit(),
            start: NonNull::new(slice.as_ptr().cast_mut()).unwrap(),
            length: slice.len(),
        }
    }

    /// Creates an `ArcSlice` from a buffer with attached metadata.
    ///
    /// When the metadata is `()`, the buffer is first given a chance to
    /// degrade to the cheaper static or inline-vector representations before
    /// falling back to an `Arc` allocation.
    #[inline]
    pub fn with_metadata<B: Buffer<T>, M: Send + Sync + 'static>(
        mut buffer: B,
        metadata: M,
    ) -> Self {
        if is!(M, ()) {
            match buffer.try_into_static() {
                Ok(slice) => return Self::new_static(slice),
                Err(b) => buffer = b,
            }
            match buffer.try_into_vec() {
                Ok(vec) => return Self::new_vec(vec),
                Err(b) => buffer = b,
            }
        }
        // Arc-backed fallback, created with an initial refcount of 1.
        let (arc, start, length) = Arc::new(buffer, metadata, 1);
        // SAFETY: `start`/`length` come directly from `Arc::new` for this arc.
        unsafe { Self::from_arc(start, length, arc) }
    }

    /// # Safety
    ///
    /// Calling [`B::borrow_metadata`](BorrowMetadata::borrow_metadata) must not invalidate
    /// the buffer slice borrow. The returned metadata must not be used to invalidate the
    /// buffer slice.
    #[inline]
    pub unsafe fn with_borrowed_metadata<B: Buffer<T> + BorrowMetadata>(buffer: B) -> Self {
        let (arc, start, length) = Arc::new_borrow(buffer);
        // SAFETY: `start`/`length` come directly from `Arc::new_borrow`.
        unsafe { Self::from_arc(start, length, arc) }
    }

    /// Wraps a `Vec` without copying, using the tagged-capacity representation
    /// when the layout allows it.
    fn new_vec(mut vec: Vec<T>) -> Self {
        if vec.capacity() == 0 {
            // No allocation to keep alive: degrade to the static empty slice.
            return Self::new_static(&[]);
        }
        let Some(base) = L::get_base(vec.len() == vec.capacity(), vec.as_mut_ptr()) else {
            // The layout cannot represent this vector inline (e.g. `Compact`
            // with spare capacity): promote it to an `Arc` buffer instead.
            #[cold]
            fn alloc<T: Send + Sync + 'static, L: Layout>(vec: Vec<T>) -> ArcSlice<T, L> {
                let (arc, start, length) = Arc::new(vec, (), 1);
                unsafe { ArcSlice::from_arc(start, length, arc) }
            }
            return alloc(vec);
        };
        // The vec's allocation is now owned by `self`; suppress its drop.
        let mut vec = ManuallyDrop::new(vec);
        // Pack the capacity into the pointer slot, tagged with VEC_FLAG.
        let arc_or_capa = ptr::without_provenance_mut::<()>(VEC_FLAG | (vec.capacity() << 1));
        Self {
            arc_or_capa: AtomicPtr::new(arc_or_capa),
            base: MaybeUninit::new(base),
            start: NonNull::new(vec.as_mut_ptr()).unwrap(),
            length: vec.len(),
        }
    }

    /// Wraps an already-offset vector allocation.
    ///
    /// SAFETY (caller contract, hedged): `start - offset` must be the base of a
    /// live `Vec` allocation of capacity `offset + capacity`, with
    /// `offset + length` initialized elements — TODO confirm against callers.
    pub(crate) unsafe fn new_vec_with_offset(
        start: NonNull<T>,
        length: usize,
        capacity: usize,
        offset: usize,
    ) -> Self {
        if capacity == 0 && offset == 0 {
            // Empty allocation: degrade to the static empty slice.
            return Self::new_static(&[]);
        }
        let base_ptr = unsafe { start.as_ptr().sub(offset) };
        let Some(base) = L::get_base(length == capacity, base_ptr) else {
            // Layout cannot store the base: rebuild the original `Vec` and
            // promote it to an `Arc` buffer (cold path).
            #[cold]
            fn alloc<T: Send + Sync + 'static, L: Layout>(
                start: NonNull<T>,
                length: usize,
                capacity: usize,
                offset: usize,
            ) -> ArcSlice<T, L> {
                let base_ptr = unsafe { start.as_ptr().sub(offset) };
                let vec =
                    unsafe { Vec::from_raw_parts(base_ptr, offset + length, offset + capacity) };
                let (arc, _, _) = Arc::new(vec, (), 1);
                unsafe { ArcSlice::from_arc(start, length, arc) }
            }
            return alloc(start, length, capacity, offset);
        };
        // Store the *total* capacity (offset included) in the tagged slot, so
        // `rebuild_vec` can reconstruct the full allocation later.
        let arc_or_capa = ptr::without_provenance_mut::<()>(VEC_FLAG | ((offset + capacity) << 1));
        Self {
            arc_or_capa: AtomicPtr::new(arc_or_capa),
            base: MaybeUninit::new(base),
            start,
            length,
        }
    }

    /// # Safety
    ///
    /// `start` and `length` must represent a valid slice for the buffer contained in `arc`.
    pub(crate) unsafe fn from_arc(start: NonNull<T>, length: usize, arc: Arc<T>) -> Self {
        Self {
            // The arc's refcount ownership is transferred into the raw pointer.
            arc_or_capa: AtomicPtr::new(arc.into_ptr().as_ptr()),
            base: MaybeUninit::uninit(),
            start,
            length,
        }
    }

    /// Creates an `ArcSlice` by cloning the contents of `slice`.
    #[inline]
    pub fn from_slice(slice: &[T]) -> Self
    where
        T: Clone,
    {
        slice.to_vec().into()
    }

    /// Reconstructs the original `Vec` from the `Inner::Vec` representation.
    ///
    /// SAFETY (caller contract): the slice must currently be in the `Vec`
    /// representation with the given capacity, and `self.base` initialized.
    #[allow(unstable_name_collisions)]
    unsafe fn rebuild_vec(&self, capacity: NonZero<usize>) -> Vec<T> {
        let (ptr, len) = if let Some(base) = L::base_into_ptr(unsafe { self.base.assume_init() }) {
            // Layout stores the base: length is offset-from-base + visible len.
            let len = unsafe { self.start.sub_ptr(base) } + self.length;
            (base.as_ptr(), len)
        } else {
            // Layouts that elide the base only accept full vectors (see
            // `Compact::get_base`), so the current offset is capacity - length.
            let offset = capacity.get() - self.length;
            let ptr = unsafe { self.start.as_ptr().sub(offset) };
            (ptr, capacity.get())
        };
        unsafe { Vec::from_raw_parts(ptr, len, capacity.get()) }
    }

    /// Moves the visible `start..start+length` window to the front of `vec`.
    #[allow(unstable_name_collisions)]
    unsafe fn shift_vec(&self, mut vec: Vec<T>) -> Vec<T> {
        unsafe {
            let offset = self.start.as_ptr().sub_ptr(vec.as_mut_ptr());
            vec.shift_left(offset, self.length)
        };
        vec
    }

    /// Decodes a previously loaded `arc_or_capa` value into `Inner`.
    #[allow(clippy::incompatible_msrv)]
    #[inline(always)]
    fn inner(&self, arc_or_capa: *mut ()) -> Inner<T> {
        let capacity = arc_or_capa.addr() >> VEC_CAPA_SHIFT;
        match NonNull::new(arc_or_capa) {
            Some(_) if arc_or_capa.addr() & VEC_FLAG != 0 => Inner::Vec {
                // SAFETY: the vec constructors never store a zero capacity.
                capacity: unsafe { NonZero::new_unchecked(capacity) },
            },
            // Non-null, untagged: a pointer to the shared `Arc` allocation.
            Some(arc) => Inner::Arc(ManuallyDrop::new(unsafe { Arc::from_ptr(arc) })),
            None => Inner::Static,
        }
    }

    /// Decodes the representation with exclusive access (no atomic ordering
    /// needed since `&mut self` rules out concurrent writers).
    #[inline(always)]
    fn inner_mut(&mut self) -> Inner<T> {
        let arc_or_capa = atomic_ptr_with_mut(&mut self.arc_or_capa, |ptr| *ptr);
        self.inner(arc_or_capa)
    }

    /// Returns the number of elements in the slice.
    #[inline]
    pub const fn len(&self) -> usize {
        self.length
    }

    /// Returns `true` if the slice contains no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the visible elements as a plain slice.
    #[inline]
    pub const fn as_slice(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.start.as_ptr(), self.len()) }
    }

    /// Returns a borrowed view over `range` that can later be promoted back to
    /// an owning `ArcSlice` (see [`ArcSliceRef::into_arc`]).
    #[inline]
    pub fn get_ref(&self, range: impl RangeBounds<usize>) -> ArcSliceRef<T, L> {
        let (offset, len) = offset_len(self.length, range);
        ArcSliceRef {
            slice: &self[offset..offset + len],
            arc_slice: self,
        }
    }

    /// Shortens the slice to `len` elements; a no-op if already short enough.
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        if len >= self.length {
            return;
        }
        match self.inner_mut() {
            // Non-truncatable layouts cannot keep a shortened vec inline:
            // promote it to an `Arc` first (cold path).
            Inner::Vec { .. } if !L::TRUNCATABLE => return unsafe { self.truncate_vec(len) },
            // Exclusive vec: drop the cut-off tail elements in place.
            Inner::Vec { .. } if mem::needs_drop::<T>() => unsafe {
                let end = self.start.as_ptr().add(len);
                ptr::drop_in_place(ptr::slice_from_raw_parts_mut(end, self.len() - len));
            },
            // Static/Arc: elements stay alive until the backing buffer drops.
            _ => {}
        }
        self.length = len;
    }

    /// Cold path of [`Self::truncate`]: converts the inline vec to an `Arc`
    /// buffer, then shortens the visible length.
    #[cold]
    unsafe fn truncate_vec(&mut self, len: usize) {
        let Inner::Vec { capacity } = self.inner_mut() else {
            // SAFETY: only called from `truncate` after matching `Inner::Vec`.
            unsafe { hint::unreachable_unchecked() }
        };
        let vec = unsafe { self.rebuild_vec(capacity) };
        let (arc, _, _) = Arc::new(vec, (), 1);
        atomic_ptr_with_mut(&mut self.arc_or_capa, |ptr| {
            *ptr = arc.into_ptr().as_ptr();
        });
        self.length = len;
    }

    /// Advances the start of the slice by `offset` elements.
    ///
    /// Panics if `offset > len`. The backing buffer is unchanged, so the full
    /// allocation is still reclaimed correctly on drop.
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    pub fn advance(&mut self, offset: usize) {
        if offset > self.length {
            panic_out_of_range();
        }
        self.start = unsafe { self.start.add(offset) };
        self.length -= offset;
    }

    /// Builds a subslice sharing the same buffer.
    ///
    /// SAFETY (caller contract): `offset..offset + len` must lie within
    /// `0..self.length`.
    #[allow(clippy::incompatible_msrv)]
    pub(crate) unsafe fn subslice_impl(&self, offset: usize, len: usize) -> Self {
        if len == 0 {
            // Empty result needs no buffer: use the static representation and
            // avoid touching any refcount.
            return Self {
                arc_or_capa: AtomicPtr::new(ptr::null_mut()),
                base: MaybeUninit::uninit(),
                start: unsafe { self.start.add(offset) },
                length: 0,
            };
        }
        // Clone (bumping the refcount / promoting a vec), then narrow the view.
        let mut clone = self.clone();
        clone.start = unsafe { self.start.add(offset) };
        clone.length = len;
        clone
    }

    /// Returns a subslice over `range`, sharing the same buffer.
    #[inline]
    pub fn subslice(&self, range: impl RangeBounds<usize>) -> Self {
        let (offset, len) = offset_len(self.length, range);
        unsafe { self.subslice_impl(offset, len) }
    }

    /// Returns a subslice matching `subset`, which must point into `self`.
    #[inline]
    pub fn subslice_from_ref(&self, subset: &[T]) -> Self {
        let (offset, len) = offset_len_subslice(self, subset);
        unsafe { self.subslice_impl(offset, len) }
    }

    /// Splits the slice at `at`, keeping `[0, at)` in `self` and returning
    /// `[at, len)`. Panics if `at > len`.
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    #[must_use = "consider `ArcSlice::truncate` if you don't need the other half"]
    pub fn split_off(&mut self, at: usize) -> Self {
        if at == 0 {
            // Whole slice moves out; `self` becomes an empty view.
            return mem::replace(self, unsafe { self.subslice_impl(0, 0) });
        } else if at == self.length {
            // Nothing moves out; return an empty view at the end.
            return unsafe { self.subslice_impl(at, 0) };
        } else if at > self.length {
            panic_out_of_range();
        }
        let mut clone = self.clone();
        clone.start = unsafe { clone.start.add(at) };
        clone.length -= at;
        self.length = at;
        clone
    }

    /// Splits the slice at `at`, keeping `[at, len)` in `self` and returning
    /// `[0, at)`. Panics if `at > len`.
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    #[must_use = "consider `ArcSlice::advance` if you don't need the other half"]
    pub fn split_to(&mut self, at: usize) -> Self {
        if at == 0 {
            // Nothing moves out; return an empty view at the start.
            return unsafe { self.subslice_impl(0, 0) };
        } else if at == self.length {
            // Whole slice moves out; `self` becomes an empty view.
            return mem::replace(self, unsafe { self.subslice_impl(self.len(), 0) });
        } else if at > self.length {
            panic_out_of_range();
        }
        let mut clone = self.clone();
        clone.length = at;
        self.start = unsafe { self.start.add(at) };
        self.length -= at;
        clone
    }

    /// Attempts to convert into a mutable slice; fails (returning `self`
    /// unchanged) for static slices and non-unique `Arc` buffers.
    #[inline]
    pub fn try_into_mut(mut self) -> Result<ArcSliceMut<T>, Self> {
        let mut slice_mut = match self.inner_mut() {
            Inner::Static => return Err(self),
            Inner::Vec { capacity } => ArcSliceMut::new(unsafe { self.rebuild_vec(capacity) }),
            Inner::Arc(mut arc) => match unsafe { arc.try_as_mut() } {
                Some(s) => s,
                None => return Err(self),
            },
        };
        // Preserve the current window in the mutable view.
        unsafe { slice_mut.set_start_len(self.start, self.length) };
        // Ownership transferred to `slice_mut`: skip our own drop.
        mem::forget(self);
        Ok(slice_mut)
    }

    /// Converts into a `Vec`, reusing the underlying allocation when it is
    /// uniquely owned and copying otherwise.
    #[inline]
    pub fn into_vec(self) -> Vec<T>
    where
        T: Clone,
    {
        let mut this = ManuallyDrop::new(self);
        match this.inner_mut() {
            Inner::Static => this.as_slice().to_vec(),
            // Exclusive vec: rebuild it and move the window to the front.
            Inner::Vec { capacity } => unsafe { this.shift_vec(this.rebuild_vec(capacity)) },
            Inner::Arc(mut arc) => unsafe {
                let mut vec = MaybeUninit::<Vec<T>>::uninit();
                // `take_buffer` tries to steal the underlying vec; on failure
                // (e.g. shared), fall back to copying, releasing our refcount.
                if !arc.take_buffer(this.length, NonNull::new(vec.as_mut_ptr()).unwrap()) {
                    let vec = this.as_slice().to_vec();
                    drop(ManuallyDrop::into_inner(arc));
                    return vec;
                }
                this.shift_vec(vec.assume_init())
            },
        }
    }

    /// Converts into a `Cow`: borrowed for static slices, owned otherwise.
    #[inline]
    pub fn into_cow(mut self) -> Cow<'static, [T]>
    where
        T: Clone,
    {
        match self.inner_mut() {
            Inner::Static => unsafe {
                // SAFETY: the static representation borrows a `&'static` slice,
                // so extending the lifetime merely restores the original one.
                mem::transmute::<&[T], &'static [T]>(self.as_slice()).into()
            },
            _ => self.into_vec().into(),
        }
    }

    /// Returns the metadata attached at construction, if it has type `M`.
    /// Non-`Arc` representations only carry the unit metadata.
    #[inline]
    pub fn get_metadata<M: Any>(&self) -> Option<&M> {
        // Acquire pairs with the Release CAS in `clone_vec` (see comment there).
        match self.inner(self.arc_or_capa.load(Ordering::Acquire)) {
            Inner::Arc(arc) => arc.get_metadata(),
            _ if is!(M, ()) => Some(unit_metadata()),
            _ => None,
        }
    }

    /// Attempts to extract the original buffer of type `B`, consuming `self`;
    /// returns `self` unchanged on type mismatch or shared ownership.
    #[inline]
    pub fn downcast_buffer<B: Buffer<T>>(mut self) -> Result<B, Self> {
        let mut buffer = MaybeUninit::<B>::uninit();
        match self.inner_mut() {
            Inner::Static if is!(B, &'static [T]) => unsafe {
                buffer.as_mut_ptr().cast::<&[T]>().write(self.as_slice());
            },
            Inner::Vec { capacity } if is!(B, Vec<T>) => unsafe {
                let vec_ptr = buffer.as_mut_ptr().cast::<Vec<T>>();
                vec_ptr.write(self.shift_vec(self.rebuild_vec(capacity)));
            },
            Inner::Arc(mut arc) => unsafe {
                if !arc.take_buffer(self.length, NonNull::from(&mut buffer).cast::<B>()) {
                    return Err(self);
                }
                // For vec buffers, normalize by moving the window to the front.
                if is!(B, Vec<T>) {
                    let vec_ptr = buffer.as_mut_ptr().cast::<Vec<T>>();
                    vec_ptr.write(self.shift_vec(vec_ptr.read()));
                }
            },
            _ => return Err(self),
        }
        // The buffer now owns the allocation: skip our own drop.
        mem::forget(self);
        Ok(unsafe { buffer.assume_init() })
    }

    /// Returns `true` if this slice is the only owner of its buffer.
    /// Static slices are shared by definition and report `false`.
    #[inline]
    pub fn is_unique(&self) -> bool {
        match self.inner(self.arc_or_capa.load(Ordering::Acquire)) {
            Inner::Static => false,
            Inner::Vec { .. } => true,
            Inner::Arc(arc) => arc.is_unique(),
        }
    }

    /// Converts to another layout. Static and `Arc` representations are
    /// layout-independent; an inline vec must be re-encoded because the base
    /// storage differs between layouts.
    #[inline]
    pub fn with_layout<L2: Layout>(self) -> ArcSlice<T, L2> {
        let mut this = ManuallyDrop::new(self);
        let arc_or_capa = atomic_ptr_with_mut(&mut this.arc_or_capa, |ptr| *ptr);
        match this.inner(arc_or_capa) {
            Inner::Vec { capacity } => ArcSlice::new_vec(unsafe { this.rebuild_vec(capacity) }),
            _ => ArcSlice {
                arc_or_capa: arc_or_capa.into(),
                base: MaybeUninit::uninit(),
                start: this.start,
                length: this.length,
            },
        }
    }

    /// Cold drop path for the `Vec` representation: rebuild and drop the vec.
    #[cold]
    unsafe fn drop_vec(&mut self) {
        let Inner::Vec { capacity } = self.inner_mut() else {
            // SAFETY: only called from `Drop` after matching `Inner::Vec`.
            unsafe { hint::unreachable_unchecked() }
        };
        drop(unsafe { self.rebuild_vec(capacity) });
    }

    /// Cold clone path for the `Vec` representation: promote the exclusive vec
    /// to an `Arc` (refcount 2: one for `self`, one for the returned clone)
    /// and install it with a CAS, racing against concurrent cloners.
    #[cold]
    unsafe fn clone_vec(&self, arc_or_capa: *mut ()) -> Self {
        let Inner::Vec { capacity } = self.inner(arc_or_capa) else {
            // SAFETY: only called from `Clone` after matching `Inner::Vec`.
            unsafe { hint::unreachable_unchecked() }
        };
        let vec = unsafe { self.rebuild_vec(capacity) };
        let (arc, _, _) = Arc::new(vec, (), 2);
        let arc_ptr = arc.into_ptr();
        // Release ordering must be used to ensure the arc vtable is visible
        // by `get_metadata`. In case of failure, the read arc is cloned with
        // a FAA, so there is no need of synchronization.
        let arc = match self.arc_or_capa.compare_exchange(
            arc_or_capa,
            arc_ptr.as_ptr(),
            Ordering::Release,
            Ordering::Acquire,
        ) {
            Ok(_) => unsafe { Arc::from_ptr(arc_ptr) },
            Err(ptr) => {
                // Lost the race: another thread installed its arc first.
                // Discard ours without double-freeing the vec allocation,
                // then clone the winner's arc.
                unsafe { Arc::<T>::from_ptr(arc_ptr).forget_vec() };
                let arc = unsafe { Arc::from_ptr(NonNull::new(ptr).unwrap_unchecked()) };
                (*ManuallyDrop::new(arc)).clone()
            }
        };
        unsafe { Self::from_arc(self.start, self.length, arc) }
    }
}
526
// SAFETY: the raw pointers are what inhibit the auto-impls; the pointed-to
// data is `T: Send + Sync`, and all shared mutation goes through `arc_or_capa`
// atomics — NOTE(review): soundness rests on the `Arc` internals, confirm.
unsafe impl<T: Send + Sync + 'static, L: Layout> Send for ArcSlice<T, L> {}
unsafe impl<T: Send + Sync + 'static, L: Layout> Sync for ArcSlice<T, L> {}
529
impl<T: Send + Sync + 'static, L: Layout> Drop for ArcSlice<T, L> {
    #[inline]
    fn drop(&mut self) {
        match self.inner_mut() {
            // Static slices own nothing.
            Inner::Static => {}
            // Exclusive vec: rebuild it and let `Vec`'s drop free it (cold).
            Inner::Vec { .. } => unsafe { self.drop_vec() },
            // Shared buffer: release this handle's refcount.
            Inner::Arc(arc) => drop(ManuallyDrop::into_inner(arc)),
        }
    }
}
540
impl<T: Send + Sync + 'static, L: Layout> Clone for ArcSlice<T, L> {
    #[inline]
    fn clone(&self) -> Self {
        // Acquire pairs with the Release CAS in `clone_vec`.
        let arc_or_capa = self.arc_or_capa.load(Ordering::Acquire);
        match self.inner(arc_or_capa) {
            // Static: nothing to count, just copy the fields.
            Inner::Static => {}
            // Exclusive vec: must first be promoted to an `Arc` (cold path).
            Inner::Vec { .. } => return unsafe { self.clone_vec(arc_or_capa) },
            // Shared buffer: bump the refcount, keep the arc alive in `self`.
            Inner::Arc(arc) => mem::forget((*arc).clone()),
        };
        Self {
            arc_or_capa: AtomicPtr::new(arc_or_capa),
            base: MaybeUninit::uninit(),
            start: self.start,
            length: self.length,
        }
    }
}
558
559impl<T: Send + Sync + 'static, L: Layout> Deref for ArcSlice<T, L> {
560    type Target = [T];
561
562    #[inline]
563    fn deref(&self) -> &Self::Target {
564        self.as_slice()
565    }
566}
567
568impl<T: Send + Sync + 'static, L: Layout> AsRef<[T]> for ArcSlice<T, L> {
569    #[inline]
570    fn as_ref(&self) -> &[T] {
571        self
572    }
573}
574
575impl<T: Hash + Send + Sync + 'static, L: Layout> Hash for ArcSlice<T, L> {
576    #[inline]
577    fn hash<H>(&self, state: &mut H)
578    where
579        H: Hasher,
580    {
581        self.as_slice().hash(state);
582    }
583}
584
585impl<T: Send + Sync + 'static, L: Layout> Borrow<[T]> for ArcSlice<T, L> {
586    #[inline]
587    fn borrow(&self) -> &[T] {
588        self
589    }
590}
591
592#[cfg(not(all(loom, test)))]
593impl<T: Send + Sync + 'static, L: Layout> Default for ArcSlice<T, L> {
594    #[inline]
595    fn default() -> Self {
596        Self::new_static(&[])
597    }
598}
599
impl<T: fmt::Debug + Send + Sync + 'static, L: Layout> fmt::Debug for ArcSlice<T, L> {
    // Delegates to the crate-wide slice debug helper for consistent output.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        debug_slice(self, f)
    }
}
605
606impl<L: Layout> fmt::LowerHex for ArcSlice<u8, L> {
607    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
608        for &b in self.as_slice() {
609            write!(f, "{:02x}", b)?;
610        }
611        Ok(())
612    }
613}
614
615impl<L: Layout> fmt::UpperHex for ArcSlice<u8, L> {
616    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
617        for &b in self.as_slice() {
618            write!(f, "{:02X}", b)?;
619        }
620        Ok(())
621    }
622}
623
624impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq for ArcSlice<T, L> {
625    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
626        self.as_slice() == other.as_slice()
627    }
628}
629
// NOTE(review): `Eq` is implemented under only a `T: PartialEq` bound, so e.g.
// `ArcSlice<f32>` would claim total equality `f32` does not have. Tightening
// to `T: Eq` would be a breaking change — confirm this looseness is intended.
impl<T: PartialEq + Send + Sync + 'static, L: Layout> Eq for ArcSlice<T, L> {}
631
632impl<T: PartialOrd + Send + Sync + 'static, L: Layout> PartialOrd for ArcSlice<T, L> {
633    fn partial_cmp(&self, other: &ArcSlice<T, L>) -> Option<cmp::Ordering> {
634        self.as_slice().partial_cmp(other.as_slice())
635    }
636}
637
638impl<T: Ord + Send + Sync + 'static, L: Layout> Ord for ArcSlice<T, L> {
639    fn cmp(&self, other: &ArcSlice<T, L>) -> cmp::Ordering {
640        self.as_slice().cmp(other.as_slice())
641    }
642}
643
644impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<[T]> for ArcSlice<T, L> {
645    fn eq(&self, other: &[T]) -> bool {
646        self.as_slice() == other
647    }
648}
649
650impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<ArcSlice<T, L>> for [T] {
651    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
652        *other == *self
653    }
654}
655
656impl<T: PartialEq + Send + Sync + 'static, L: Layout, const N: usize> PartialEq<[T; N]>
657    for ArcSlice<T, L>
658{
659    fn eq(&self, other: &[T; N]) -> bool {
660        self.as_slice() == other
661    }
662}
663
664impl<T: PartialEq + Send + Sync + 'static, L: Layout, const N: usize> PartialEq<ArcSlice<T, L>>
665    for [T; N]
666{
667    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
668        *other == *self
669    }
670}
671
672impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<Vec<T>> for ArcSlice<T, L> {
673    fn eq(&self, other: &Vec<T>) -> bool {
674        *self == other[..]
675    }
676}
677
678impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<ArcSlice<T, L>> for Vec<T> {
679    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
680        *other == *self
681    }
682}
683
684impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<ArcSlice<T, L>> for &[T] {
685    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
686        *other == *self
687    }
688}
689
690impl<'a, T: PartialEq + Send + Sync + 'static, L: Layout, O: ?Sized> PartialEq<&'a O>
691    for ArcSlice<T, L>
692where
693    ArcSlice<T, L>: PartialEq<O>,
694{
695    fn eq(&self, other: &&'a O) -> bool {
696        *self == **other
697    }
698}
699
700impl<T: Send + Sync + 'static> From<ArcSlice<T, Compact>> for ArcSlice<T, Plain> {
701    fn from(value: ArcSlice<T, Compact>) -> Self {
702        value.with_layout()
703    }
704}
705
706impl<T: Send + Sync + 'static> From<ArcSlice<T, Plain>> for ArcSlice<T, Compact> {
707    fn from(value: ArcSlice<T, Plain>) -> Self {
708        value.with_layout()
709    }
710}
711
// Blanket `From` impls for buffer types accepted by `ArcSlice::new`.
// `@$N` marks entries that need a const-generic length parameter; `: $bound`
// adds an extra trait bound on `T` for that entry.
macro_rules! std_impl {
    ($($(@$N:ident)? $ty:ty $(: $bound:path)?),*) => {$(
        impl<T: $($bound +)? Send + Sync + 'static, L: Layout, $(const $N: usize,)?> From<$ty> for ArcSlice<T, L> {

            #[inline]
            fn from(value: $ty) -> Self {
                Self::new(value)
            }
        }
    )*};
}
std_impl!(&'static [T], @N &'static [T; N], @N [T; N], Box<[T]>, Cow<'static, [T]>: Clone);
724
725// Temporary impl until the compiler regression is fixed
726impl<T: Send + Sync + 'static, L: Layout> From<Vec<T>> for ArcSlice<T, L> {
727    fn from(value: Vec<T>) -> Self {
728        Self::new_vec(value)
729    }
730}
731
732impl<T: Clone + Send + Sync + 'static, L: Layout> From<ArcSlice<T, L>> for Vec<T> {
733    #[inline]
734    fn from(value: ArcSlice<T, L>) -> Self {
735        value.into_vec()
736    }
737}
738
739impl<T: Clone + Send + Sync + 'static, L: Layout> From<ArcSlice<T, L>> for Cow<'static, [T]> {
740    #[inline]
741    fn from(value: ArcSlice<T, L>) -> Self {
742        value.into_cow()
743    }
744}
745
/// A borrowed view into an [`ArcSlice`] that can be promoted back to an
/// owning [`ArcSlice`] via [`into_arc`](Self::into_arc).
#[derive(Clone, Copy)]
pub struct ArcSliceRef<'a, T: Send + Sync + 'static, L: Layout = Compact> {
    // The selected subslice; always within `arc_slice`'s bounds (see `get_ref`).
    slice: &'a [T],
    // The owning slice the view was taken from.
    arc_slice: &'a ArcSlice<T, L>,
}
751
752impl<T: Send + Sync + 'static, L: Layout> Deref for ArcSliceRef<'_, T, L> {
753    type Target = [T];
754
755    #[inline]
756    fn deref(&self) -> &Self::Target {
757        self.slice
758    }
759}
760
761impl<T: fmt::Debug + Send + Sync + 'static, L: Layout> fmt::Debug for ArcSliceRef<'_, T, L> {
762    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
763        (**self).fmt(f)
764    }
765}
766
impl<T: Send + Sync + 'static, L: Layout> ArcSliceRef<'_, T, L> {
    /// Promotes this borrowed view back into an owning [`ArcSlice`] sharing
    /// the same buffer.
    #[inline]
    pub fn into_arc(self) -> ArcSlice<T, L> {
        // SAFETY: `slice` was derived from `arc_slice` (see `get_ref`), so the
        // unchecked offset/length computation stays in bounds.
        let (offset, len) = unsafe { offset_len_subslice_unchecked(self.arc_slice, self.slice) };
        unsafe { self.arc_slice.subslice_impl(offset, len) }
    }
}