use alloc::{borrow::Cow, boxed::Box, vec::Vec};
use core::{
    any::Any,
    borrow::Borrow,
    cmp, fmt,
    hash::{Hash, Hasher},
    hint, mem,
    mem::{ManuallyDrop, MaybeUninit},
    ops::{Deref, RangeBounds},
    ptr::NonNull,
};

#[allow(unused_imports)]
use crate::msrv::{NonNullExt, StrictProvenance};
use crate::{
    arc::{unit_metadata, Arc},
    buffer::{BorrowMetadata, Buffer, BufferMutExt},
    layout::{Compact, Layout, Plain},
    loom::{
        atomic_ptr_with_mut,
        sync::atomic::{AtomicPtr, Ordering},
    },
    macros::is,
    msrv::{ptr, NonZero, SubPtrExt},
    utils::{
        debug_slice, offset_len, offset_len_subslice, offset_len_subslice_unchecked,
        panic_out_of_range,
    },
    ArcSliceMut,
};

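/// Layout-dependent encoding of an [`ArcSlice`]'s backing storage.
///
/// [`Compact`] stores no base pointer: a `Vec` can only be kept inline when the
/// slice covers its whole allocation, and truncation has to fall back to an
/// `Arc`. [`Plain`] records the allocation base, so the original `Vec` can
/// always be rebuilt and truncation stays in place.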
pub trait ArcSliceLayout {
    type Base: Copy + 'static;
    const TRUNCATABLE: bool;
    fn get_base<T>(full: bool, base: *mut T) -> Option<Self::Base>;
    fn base_into_ptr<T>(base: Self::Base) -> Option<NonNull<T>>;
}

impl ArcSliceLayout for Compact {
    type Base = ();
    const TRUNCATABLE: bool = false;
    fn get_base<T>(full: bool, _base: *mut T) -> Option<Self::Base> {
        full.then_some(())
    }
    fn base_into_ptr<T>(_base: Self::Base) -> Option<NonNull<T>> {
        None
    }
}

impl ArcSliceLayout for Plain {
    type Base = NonNull<()>;
    const TRUNCATABLE: bool = true;
    fn get_base<T>(_full: bool, base: *mut T) -> Option<Self::Base> {
        Some(NonNull::new(base).unwrap().cast())
    }
    fn base_into_ptr<T>(base: Self::Base) -> Option<NonNull<T>> {
        Some(base.cast())
    }
}

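/// A thread-safe, cheaply cloneable and sliceable view of a contiguous buffer.
///
/// The backing storage is either a `&'static` slice, an owned `Vec<T>`
/// (promoted to an `Arc` the first time the slice is cloned), or an
/// `Arc`-managed buffer; `arc_or_capa` is a tagged pointer encoding which of
/// the three it is.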
#[repr(C)]
pub struct ArcSlice<T: Send + Sync + 'static, L: Layout = Compact> {
    #[cfg(target_endian = "big")]
    length: usize,
    arc_or_capa: AtomicPtr<()>,
    base: MaybeUninit<<L as ArcSliceLayout>::Base>,
    start: NonNull<T>,
    #[cfg(target_endian = "little")]
    length: usize,
}

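// Tagged-pointer encoding of `arc_or_capa`: null means static storage, a value
// with `VEC_FLAG` set carries a `Vec` capacity in its upper bits, and any other
// non-null value is a pointer to an `Arc`.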
const VEC_FLAG: usize = 1;
const VEC_CAPA_SHIFT: usize = 1;

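/// Decoded form of the `arc_or_capa` tagged pointer.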
enum Inner<T> {
    Static,
    Vec { capacity: NonZero<usize> },
    Arc(ManuallyDrop<Arc<T>>),
}

impl<T: Send + Sync + 'static, L: Layout> ArcSlice<T, L> {
    #[inline]
    pub fn new<B: Buffer<T>>(buffer: B) -> Self {
        Self::with_metadata(buffer, ())
    }

    #[cfg(not(all(loom, test)))]
    #[inline]
    pub const fn new_static(slice: &'static [T]) -> Self {
        Self {
            arc_or_capa: AtomicPtr::new(ptr::null_mut()),
            base: MaybeUninit::uninit(),
            start: unsafe { NonNull::new_unchecked(slice.as_ptr().cast_mut()) },
            length: slice.len(),
        }
    }

    #[cfg(all(loom, test))]
    pub fn new_static(slice: &'static [T]) -> Self {
        Self {
            arc_or_capa: AtomicPtr::new(ptr::null_mut()),
            base: MaybeUninit::uninit(),
            start: NonNull::new(slice.as_ptr().cast_mut()).unwrap(),
            length: slice.len(),
        }
    }

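    /// Creates a slice from `buffer`, attaching `metadata` that can later be
    /// retrieved with [`Self::get_metadata`].
    ///
    /// With unit metadata, static and `Vec` buffers are stored inline and no
    /// `Arc` is allocated up front.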
    #[inline]
    pub fn with_metadata<B: Buffer<T>, M: Send + Sync + 'static>(
        mut buffer: B,
        metadata: M,
    ) -> Self {
        if is!(M, ()) {
            match buffer.try_into_static() {
                Ok(slice) => return Self::new_static(slice),
                Err(b) => buffer = b,
            }
            match buffer.try_into_vec() {
                Ok(vec) => return Self::new_vec(vec),
                Err(b) => buffer = b,
            }
        }
        let (arc, start, length) = Arc::new(buffer, metadata, 1);
        unsafe { Self::from_arc(start, length, arc) }
    }

    #[inline]
    pub fn with_borrowed_metadata<B: Buffer<T> + BorrowMetadata>(buffer: B) -> Self {
        let (arc, start, length) = Arc::new_borrow(buffer);
        unsafe { Self::from_arc(start, length, arc) }
    }

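    // Stores the `Vec` inline (capacity in the tagged pointer, base per the
    // layout) when possible; otherwise falls back to an `Arc`-managed buffer.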
    fn new_vec(mut vec: Vec<T>) -> Self {
        if vec.capacity() == 0 {
            return Self::new_static(&[]);
        }
        let Some(base) = L::get_base(vec.len() == vec.capacity(), vec.as_mut_ptr()) else {
            #[cold]
            fn alloc<T: Send + Sync + 'static, L: Layout>(vec: Vec<T>) -> ArcSlice<T, L> {
                let (arc, start, length) = Arc::new(vec, (), 1);
                unsafe { ArcSlice::from_arc(start, length, arc) }
            }
            return alloc(vec);
        };
        let mut vec = ManuallyDrop::new(vec);
        let arc_or_capa = ptr::without_provenance_mut::<()>(VEC_FLAG | (vec.capacity() << 1));
        Self {
            arc_or_capa: AtomicPtr::new(arc_or_capa),
            base: MaybeUninit::new(base),
            start: NonNull::new(vec.as_mut_ptr()).unwrap(),
            length: vec.len(),
        }
    }

    pub(crate) unsafe fn new_vec_with_offset(
        start: NonNull<T>,
        length: usize,
        capacity: usize,
        offset: usize,
    ) -> Self {
        if capacity == 0 && offset == 0 {
            return Self::new_static(&[]);
        }
        let base_ptr = unsafe { start.as_ptr().sub(offset) };
        let Some(base) = L::get_base(length == capacity, base_ptr) else {
            #[cold]
            fn alloc<T: Send + Sync + 'static, L: Layout>(
                start: NonNull<T>,
                length: usize,
                capacity: usize,
                offset: usize,
            ) -> ArcSlice<T, L> {
                let base_ptr = unsafe { start.as_ptr().sub(offset) };
                let vec =
                    unsafe { Vec::from_raw_parts(base_ptr, offset + length, offset + capacity) };
                let (arc, _, _) = Arc::new(vec, (), 1);
                unsafe { ArcSlice::from_arc(start, length, arc) }
            }
            return alloc(start, length, capacity, offset);
        };
        let arc_or_capa = ptr::without_provenance_mut::<()>(VEC_FLAG | ((offset + capacity) << 1));
        Self {
            arc_or_capa: AtomicPtr::new(arc_or_capa),
            base: MaybeUninit::new(base),
            start,
            length,
        }
    }

    pub(crate) unsafe fn from_arc(start: NonNull<T>, length: usize, arc: Arc<T>) -> Self {
        Self {
            arc_or_capa: AtomicPtr::new(arc.into_ptr().as_ptr()),
            base: MaybeUninit::uninit(),
            start,
            length,
        }
    }

    #[inline]
    pub fn from_slice(slice: &[T]) -> Self
    where
        T: Clone,
    {
        slice.to_vec().into()
    }

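    // Reconstructs the original `Vec`: with a stored base (`Plain`) the front
    // offset is `start - base`; without one (`Compact`) it is `capacity - length`,
    // which holds because a `Compact` vector stays inline only while the slice
    // still ends at the end of its allocation.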
    #[allow(unstable_name_collisions)]
    unsafe fn rebuild_vec(&self, capacity: NonZero<usize>) -> Vec<T> {
        let (ptr, len) = if let Some(base) = L::base_into_ptr(unsafe { self.base.assume_init() }) {
            let len = unsafe { self.start.sub_ptr(base) } + self.length;
            (base.as_ptr(), len)
        } else {
            let offset = capacity.get() - self.length;
            let ptr = unsafe { self.start.as_ptr().sub(offset) };
            (ptr, capacity.get())
        };
        unsafe { Vec::from_raw_parts(ptr, len, capacity.get()) }
    }

    #[allow(unstable_name_collisions)]
    unsafe fn shift_vec(&self, mut vec: Vec<T>) -> Vec<T> {
        unsafe {
            let offset = self.start.as_ptr().sub_ptr(vec.as_mut_ptr());
            vec.shift_left(offset, self.length)
        };
        vec
    }

    #[allow(clippy::incompatible_msrv)]
    #[inline(always)]
    fn inner(&self, arc_or_capa: *mut ()) -> Inner<T> {
        let capacity = arc_or_capa.addr() >> VEC_CAPA_SHIFT;
        match NonNull::new(arc_or_capa) {
            Some(_) if arc_or_capa.addr() & VEC_FLAG != 0 => Inner::Vec {
                capacity: unsafe { NonZero::new_unchecked(capacity) },
            },
            Some(arc) => Inner::Arc(ManuallyDrop::new(unsafe { Arc::from_ptr(arc) })),
            None => Inner::Static,
        }
    }

    #[inline(always)]
    fn inner_mut(&mut self) -> Inner<T> {
        let arc_or_capa = atomic_ptr_with_mut(&mut self.arc_or_capa, |ptr| *ptr);
        self.inner(arc_or_capa)
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.length
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn as_slice(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.start.as_ptr(), self.len()) }
    }

    #[inline]
    pub fn get_ref(&self, range: impl RangeBounds<usize>) -> ArcSliceRef<T, L> {
        let (offset, len) = offset_len(self.length, range);
        ArcSliceRef {
            slice: &self[offset..offset + len],
            arc_slice: self,
        }
    }

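    /// Shortens the slice to `len` elements, keeping the front; does nothing if
    /// `len >= self.len()`.
    ///
    /// With the `Plain` layout, a `Vec`-backed slice drops the cut-off elements
    /// in place; with `Compact`, the `Vec` is moved into an `Arc` instead. For
    /// static or `Arc`-backed storage only the length is reduced.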
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        if len >= self.length {
            return;
        }
        match self.inner_mut() {
            Inner::Vec { .. } if !L::TRUNCATABLE => return unsafe { self.truncate_vec(len) },
            Inner::Vec { .. } if mem::needs_drop::<T>() => unsafe {
                let end = self.start.as_ptr().add(len);
                ptr::drop_in_place(ptr::slice_from_raw_parts_mut(end, self.len() - len));
            },
            _ => {}
        }
        self.length = len;
    }

    #[cold]
    unsafe fn truncate_vec(&mut self, len: usize) {
        let Inner::Vec { capacity } = self.inner_mut() else {
            unsafe { hint::unreachable_unchecked() }
        };
        let vec = unsafe { self.rebuild_vec(capacity) };
        let (arc, _, _) = Arc::new(vec, (), 1);
        atomic_ptr_with_mut(&mut self.arc_or_capa, |ptr| {
            *ptr = arc.into_ptr().as_ptr();
        });
        self.length = len;
    }

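    /// Advances the start of the slice by `offset` elements, shrinking it
    /// accordingly.
    ///
    /// # Panics
    ///
    /// Panics if `offset > self.len()`.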
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    pub fn advance(&mut self, offset: usize) {
        if offset > self.length {
            panic_out_of_range();
        }
        self.start = unsafe { self.start.add(offset) };
        self.length -= offset;
    }

    #[allow(clippy::incompatible_msrv)]
    pub(crate) unsafe fn subslice_impl(&self, offset: usize, len: usize) -> Self {
        if len == 0 {
            return Self {
                arc_or_capa: AtomicPtr::new(ptr::null_mut()),
                base: MaybeUninit::uninit(),
                start: unsafe { self.start.add(offset) },
                length: 0,
            };
        }
        let mut clone = self.clone();
        clone.start = unsafe { self.start.add(offset) };
        clone.length = len;
        clone
    }

    #[inline]
    pub fn subslice(&self, range: impl RangeBounds<usize>) -> Self {
        let (offset, len) = offset_len(self.length, range);
        unsafe { self.subslice_impl(offset, len) }
    }

    #[inline]
    pub fn subslice_from_ref(&self, subset: &[T]) -> Self {
        let (offset, len) = offset_len_subslice(self, subset);
        unsafe { self.subslice_impl(offset, len) }
    }

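    /// Splits the slice in two at `at`, returning the tail `[at, len)` and
    /// leaving `self` as the head `[0, at)`; both halves share the same backing
    /// storage.
    ///
    /// # Panics
    ///
    /// Panics if `at > self.len()`.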
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    #[must_use = "consider `ArcSlice::truncate` if you don't need the other half"]
    pub fn split_off(&mut self, at: usize) -> Self {
        if at == 0 {
            return mem::replace(self, unsafe { self.subslice_impl(0, 0) });
        } else if at == self.length {
            return unsafe { self.subslice_impl(at, 0) };
        } else if at > self.length {
            panic_out_of_range();
        }
        let mut clone = self.clone();
        clone.start = unsafe { clone.start.add(at) };
        clone.length -= at;
        self.length = at;
        clone
    }

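    /// Splits the slice in two at `at`, returning the head `[0, at)` and
    /// leaving `self` as the tail `[at, len)`; both halves share the same
    /// backing storage.
    ///
    /// # Panics
    ///
    /// Panics if `at > self.len()`.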
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    #[must_use = "consider `ArcSlice::advance` if you don't need the other half"]
    pub fn split_to(&mut self, at: usize) -> Self {
        if at == 0 {
            return unsafe { self.subslice_impl(0, 0) };
        } else if at == self.length {
            return mem::replace(self, unsafe { self.subslice_impl(self.len(), 0) });
        } else if at > self.length {
            panic_out_of_range();
        }
        let mut clone = self.clone();
        clone.length = at;
        self.start = unsafe { self.start.add(at) };
        self.length -= at;
        clone
    }

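    /// Attempts to convert the slice into a mutable [`ArcSliceMut`].
    ///
    /// Fails and returns `self` unchanged if the storage is static or if the
    /// underlying `Arc` buffer cannot be claimed for exclusive access.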
    #[inline]
    pub fn try_into_mut(mut self) -> Result<ArcSliceMut<T>, Self> {
        let mut slice_mut = match self.inner_mut() {
            Inner::Static => return Err(self),
            Inner::Vec { capacity } => ArcSliceMut::new(unsafe { self.rebuild_vec(capacity) }),
            Inner::Arc(mut arc) => match unsafe { arc.try_as_mut() } {
                Some(s) => s,
                None => return Err(self),
            },
        };
        unsafe { slice_mut.set_start_len(self.start, self.length) };
        mem::forget(self);
        Ok(slice_mut)
    }

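    /// Converts the slice into a `Vec<T>`, reusing the backing allocation when
    /// it is uniquely owned and cloning the elements otherwise.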
    #[inline]
    pub fn into_vec(self) -> Vec<T>
    where
        T: Clone,
    {
        let mut this = ManuallyDrop::new(self);
        match this.inner_mut() {
            Inner::Static => this.as_slice().to_vec(),
            Inner::Vec { capacity } => unsafe { this.shift_vec(this.rebuild_vec(capacity)) },
            Inner::Arc(mut arc) => unsafe {
                let mut vec = MaybeUninit::<Vec<T>>::uninit();
                if !arc.take_buffer(this.length, NonNull::new(vec.as_mut_ptr()).unwrap()) {
                    let vec = this.as_slice().to_vec();
                    drop(ManuallyDrop::into_inner(arc));
                    return vec;
                }
                this.shift_vec(vec.assume_init())
            },
        }
    }

    #[inline]
    pub fn into_cow(mut self) -> Cow<'static, [T]>
    where
        T: Clone,
    {
        match self.inner_mut() {
            Inner::Static => unsafe {
                mem::transmute::<&[T], &'static [T]>(self.as_slice()).into()
            },
            _ => self.into_vec().into(),
        }
    }

    #[inline]
    pub fn get_metadata<M: Any>(&self) -> Option<&M> {
        match self.inner(self.arc_or_capa.load(Ordering::Acquire)) {
            Inner::Arc(arc) => arc.get_metadata(),
            _ if is!(M, ()) => Some(unit_metadata()),
            _ => None,
        }
    }

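    /// Attempts to take back the original buffer as a `B`, returning `self`
    /// unchanged on failure.
    ///
    /// Only static slices (`&'static [T]`), vectors (`Vec<T>`), and buffers
    /// whose `Arc` storage can be reclaimed are recoverable this way.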
    #[inline]
    pub fn downcast_buffer<B: Buffer<T>>(mut self) -> Result<B, Self> {
        let mut buffer = MaybeUninit::<B>::uninit();
        match self.inner_mut() {
            Inner::Static if is!(B, &'static [T]) => unsafe {
                buffer.as_mut_ptr().cast::<&[T]>().write(self.as_slice());
            },
            Inner::Vec { capacity } if is!(B, Vec<T>) => unsafe {
                let vec_ptr = buffer.as_mut_ptr().cast::<Vec<T>>();
                vec_ptr.write(self.shift_vec(self.rebuild_vec(capacity)));
            },
            Inner::Arc(mut arc) => unsafe {
                if !arc.take_buffer(self.length, NonNull::from(&mut buffer).cast::<B>()) {
                    return Err(self);
                }
                if is!(B, Vec<T>) {
                    let vec_ptr = buffer.as_mut_ptr().cast::<Vec<T>>();
                    vec_ptr.write(self.shift_vec(vec_ptr.read()));
                }
            },
            _ => return Err(self),
        }
        mem::forget(self);
        Ok(unsafe { buffer.assume_init() })
    }

    #[inline]
    pub fn is_unique(&self) -> bool {
        match self.inner(self.arc_or_capa.load(Ordering::Acquire)) {
            Inner::Static => false,
            Inner::Vec { .. } => true,
            Inner::Arc(arc) => arc.is_unique(),
        }
    }

    #[inline]
    pub fn with_layout<L2: Layout>(self) -> ArcSlice<T, L2> {
        let mut this = ManuallyDrop::new(self);
        let arc_or_capa = atomic_ptr_with_mut(&mut this.arc_or_capa, |ptr| *ptr);
        match this.inner(arc_or_capa) {
            Inner::Vec { capacity } => ArcSlice::new_vec(unsafe { this.rebuild_vec(capacity) }),
            _ => ArcSlice {
                arc_or_capa: arc_or_capa.into(),
                base: MaybeUninit::uninit(),
                start: this.start,
                length: this.length,
            },
        }
    }

    #[cold]
    unsafe fn drop_vec(&mut self) {
        let Inner::Vec { capacity } = self.inner_mut() else {
            unsafe { hint::unreachable_unchecked() }
        };
        drop(unsafe { self.rebuild_vec(capacity) });
    }

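    // Cloning a `Vec`-backed slice promotes the `Vec` into an `Arc` (created
    // with a reference count of 2) and publishes it with a compare-exchange;
    // if another thread won the race, the freshly created `Arc` is discarded
    // and the winner's is cloned instead.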
    #[cold]
    unsafe fn clone_vec(&self, arc_or_capa: *mut ()) -> Self {
        let Inner::Vec { capacity } = self.inner(arc_or_capa) else {
            unsafe { hint::unreachable_unchecked() }
        };
        let vec = unsafe { self.rebuild_vec(capacity) };
        let (arc, _, _) = Arc::new(vec, (), 2);
        let arc_ptr = arc.into_ptr();
        let arc = match self.arc_or_capa.compare_exchange(
            arc_or_capa,
            arc_ptr.as_ptr(),
            Ordering::Release,
            Ordering::Acquire,
        ) {
            Ok(_) => unsafe { Arc::from_ptr(arc_ptr) },
            Err(ptr) => {
                unsafe { Arc::<T>::from_ptr(arc_ptr).forget_vec() };
                let arc = unsafe { Arc::from_ptr(NonNull::new(ptr).unwrap_unchecked()) };
                (*ManuallyDrop::new(arc)).clone()
            }
        };
        unsafe { Self::from_arc(self.start, self.length, arc) }
    }
}

unsafe impl<T: Send + Sync + 'static, L: Layout> Send for ArcSlice<T, L> {}
unsafe impl<T: Send + Sync + 'static, L: Layout> Sync for ArcSlice<T, L> {}

impl<T: Send + Sync + 'static, L: Layout> Drop for ArcSlice<T, L> {
    #[inline]
    fn drop(&mut self) {
        match self.inner_mut() {
            Inner::Static => {}
            Inner::Vec { .. } => unsafe { self.drop_vec() },
            Inner::Arc(arc) => drop(ManuallyDrop::into_inner(arc)),
        }
    }
}

impl<T: Send + Sync + 'static, L: Layout> Clone for ArcSlice<T, L> {
    #[inline]
    fn clone(&self) -> Self {
        let arc_or_capa = self.arc_or_capa.load(Ordering::Acquire);
        match self.inner(arc_or_capa) {
            Inner::Static => {}
            Inner::Vec { .. } => return unsafe { self.clone_vec(arc_or_capa) },
            Inner::Arc(arc) => mem::forget((*arc).clone()),
        };
        Self {
            arc_or_capa: AtomicPtr::new(arc_or_capa),
            base: MaybeUninit::uninit(),
            start: self.start,
            length: self.length,
        }
    }
}

impl<T: Send + Sync + 'static, L: Layout> Deref for ArcSlice<T, L> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T: Send + Sync + 'static, L: Layout> AsRef<[T]> for ArcSlice<T, L> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self
    }
}

impl<T: Hash + Send + Sync + 'static, L: Layout> Hash for ArcSlice<T, L> {
    #[inline]
    fn hash<H>(&self, state: &mut H)
    where
        H: Hasher,
    {
        self.as_slice().hash(state);
    }
}

impl<T: Send + Sync + 'static, L: Layout> Borrow<[T]> for ArcSlice<T, L> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self
    }
}

#[cfg(not(all(loom, test)))]
impl<T: Send + Sync + 'static, L: Layout> Default for ArcSlice<T, L> {
    #[inline]
    fn default() -> Self {
        Self::new_static(&[])
    }
}

impl<T: fmt::Debug + Send + Sync + 'static, L: Layout> fmt::Debug for ArcSlice<T, L> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        debug_slice(self, f)
    }
}

impl<L: Layout> fmt::LowerHex for ArcSlice<u8, L> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for &b in self.as_slice() {
            write!(f, "{:02x}", b)?;
        }
        Ok(())
    }
}

impl<L: Layout> fmt::UpperHex for ArcSlice<u8, L> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for &b in self.as_slice() {
            write!(f, "{:02X}", b)?;
        }
        Ok(())
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq for ArcSlice<T, L> {
    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<T: Eq + Send + Sync + 'static, L: Layout> Eq for ArcSlice<T, L> {}

impl<T: PartialOrd + Send + Sync + 'static, L: Layout> PartialOrd for ArcSlice<T, L> {
    fn partial_cmp(&self, other: &ArcSlice<T, L>) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<T: Ord + Send + Sync + 'static, L: Layout> Ord for ArcSlice<T, L> {
    fn cmp(&self, other: &ArcSlice<T, L>) -> cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<[T]> for ArcSlice<T, L> {
    fn eq(&self, other: &[T]) -> bool {
        self.as_slice() == other
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<ArcSlice<T, L>> for [T] {
    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
        *other == *self
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout, const N: usize> PartialEq<[T; N]>
    for ArcSlice<T, L>
{
    fn eq(&self, other: &[T; N]) -> bool {
        self.as_slice() == other
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout, const N: usize> PartialEq<ArcSlice<T, L>>
    for [T; N]
{
    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
        *other == *self
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<Vec<T>> for ArcSlice<T, L> {
    fn eq(&self, other: &Vec<T>) -> bool {
        *self == other[..]
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<ArcSlice<T, L>> for Vec<T> {
    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
        *other == *self
    }
}

impl<T: PartialEq + Send + Sync + 'static, L: Layout> PartialEq<ArcSlice<T, L>> for &[T] {
    fn eq(&self, other: &ArcSlice<T, L>) -> bool {
        *other == *self
    }
}

impl<'a, T: PartialEq + Send + Sync + 'static, L: Layout, O: ?Sized> PartialEq<&'a O>
    for ArcSlice<T, L>
where
    ArcSlice<T, L>: PartialEq<O>,
{
    fn eq(&self, other: &&'a O) -> bool {
        *self == **other
    }
}

impl<T: Send + Sync + 'static> From<ArcSlice<T, Compact>> for ArcSlice<T, Plain> {
    fn from(value: ArcSlice<T, Compact>) -> Self {
        value.with_layout()
    }
}

impl<T: Send + Sync + 'static> From<ArcSlice<T, Plain>> for ArcSlice<T, Compact> {
    fn from(value: ArcSlice<T, Plain>) -> Self {
        value.with_layout()
    }
}

macro_rules! std_impl {
    ($($(@$N:ident)? $ty:ty $(: $bound:path)?),*) => {$(
        impl<T: $($bound +)? Send + Sync + 'static, L: Layout, $(const $N: usize,)?> From<$ty> for ArcSlice<T, L> {
            #[inline]
            fn from(value: $ty) -> Self {
                Self::new(value)
            }
        }
    )*};
}
std_impl!(&'static [T], @N &'static [T; N], @N [T; N], Box<[T]>, Cow<'static, [T]>: Clone);

impl<T: Send + Sync + 'static, L: Layout> From<Vec<T>> for ArcSlice<T, L> {
    #[inline]
    fn from(value: Vec<T>) -> Self {
        Self::new_vec(value)
    }
}

impl<T: Clone + Send + Sync + 'static, L: Layout> From<ArcSlice<T, L>> for Vec<T> {
    #[inline]
    fn from(value: ArcSlice<T, L>) -> Self {
        value.into_vec()
    }
}

impl<T: Clone + Send + Sync + 'static, L: Layout> From<ArcSlice<T, L>> for Cow<'static, [T]> {
    #[inline]
    fn from(value: ArcSlice<T, L>) -> Self {
        value.into_cow()
    }
}

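/// A borrowed view into an [`ArcSlice`] that can be turned back into an owned
/// subslice with [`ArcSliceRef::into_arc`].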
#[derive(Clone, Copy)]
pub struct ArcSliceRef<'a, T: Send + Sync + 'static, L: Layout = Compact> {
    slice: &'a [T],
    arc_slice: &'a ArcSlice<T, L>,
}

impl<T: Send + Sync + 'static, L: Layout> Deref for ArcSliceRef<'_, T, L> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.slice
    }
}

impl<T: fmt::Debug + Send + Sync + 'static, L: Layout> fmt::Debug for ArcSliceRef<'_, T, L> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        (**self).fmt(f)
    }
}

impl<T: Send + Sync + 'static, L: Layout> ArcSliceRef<'_, T, L> {
    #[inline]
    pub fn into_arc(self) -> ArcSlice<T, L> {
        let (offset, len) = unsafe { offset_len_subslice_unchecked(self.arc_slice, self.slice) };
        unsafe { self.arc_slice.subslice_impl(offset, len) }
    }
}
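
// A minimal usage sketch exercising the API above (static construction,
// `Vec` conversion, subslicing, and splitting). It assumes the default
// `Compact` layout and a non-loom test build; it is illustrative rather than
// exhaustive.
#[cfg(all(test, not(loom)))]
mod usage_sketch {
    use super::*;

    #[test]
    fn static_and_vec_backed_slices() {
        // Static slices allocate nothing; subslices share the same storage.
        let s: ArcSlice<u8> = ArcSlice::new_static(b"hello world");
        assert_eq!(s.len(), 11);
        let hello = s.subslice(..5);
        assert_eq!(hello, b"hello");

        // A `Vec`-backed slice can be split without copying the elements.
        let mut v: ArcSlice<u8> = alloc::vec![1u8, 2, 3, 4].into();
        let tail = v.split_off(2);
        assert_eq!(v, [1u8, 2]);
        assert_eq!(tail, [3u8, 4]);

        // Converting back yields a `Vec` with the remaining elements.
        assert_eq!(tail.into_vec(), alloc::vec![3u8, 4]);
    }
}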