use alloc::vec::Vec;
use core::{
    any::Any,
    borrow::{Borrow, BorrowMut},
    cmp, fmt, mem,
    mem::{ManuallyDrop, MaybeUninit},
    ops::{Deref, DerefMut},
    ptr::NonNull,
    slice,
};

#[allow(unused_imports)]
use crate::msrv::{NonNullExt, StrictProvenance};
use crate::{
    arc::{unit_metadata, Arc},
    buffer::{BorrowMetadata, BufferMut, BufferMutExt},
    error::TryReserveError,
    layout::Layout,
    macros::is,
    msrv::{ptr, NonZero, SubPtrExt},
    utils::{debug_slice, panic_out_of_range},
    ArcSlice,
};

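/// A mutable, splittable slice handle, backed either by a plain `Vec<T>` or
/// by a reference-counted buffer, and convertible into an immutable
/// [`ArcSlice`] via [`freeze`](Self::freeze).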
pub struct ArcSliceMut<T: Send + Sync + 'static> {
    start: NonNull<T>,
    length: usize,
    capacity: usize,
    arc_or_offset: NonNull<()>,
}

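// `arc_or_offset` is a tagged pointer: when `VEC_FLAG` is set, the remaining
// bits store the offset of `start` inside the original `Vec` allocation;
// otherwise it points to an `Arc` header, with `TAIL_FLAG` marking the handle
// that still owns the tail (spare capacity) of the buffer.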
const VEC_FLAG: usize = 0b01;
const VEC_OFFSET_SHIFT: usize = 1;

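/// Decoded form of the `arc_or_offset` tagged pointer.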
enum Inner<T> {
    Vec {
        offset: usize,
    },
    Arc {
        arc: ManuallyDrop<Arc<T>>,
        is_tail: bool,
    },
}

impl<T: Send + Sync + 'static> ArcSliceMut<T> {
    const TAIL_FLAG: usize = if mem::needs_drop::<T>() { 0b10 } else { 0 };

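    /// Creates a mutable slice from the given buffer, with unit metadata.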
    #[inline]
    pub fn new<B: BufferMut<T>>(buffer: B) -> Self {
        Self::with_metadata(buffer, ())
    }

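    /// Creates a mutable slice from the given buffer and metadata.
    ///
    /// When the metadata is `()` and the buffer can be converted into a
    /// `Vec<T>`, the vector is stored inline without allocating an `Arc`.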
    #[inline]
    pub fn with_metadata<B: BufferMut<T>, M: Send + Sync + 'static>(
        mut buffer: B,
        metadata: M,
    ) -> Self {
        if is!(M, ()) {
            match buffer.try_into_vec() {
                Ok(vec) => return Self::new_vec(vec),
                Err(b) => buffer = b,
            }
        }
        let (arc, start, length, capacity) = Arc::new_mut(buffer, metadata, 1);
        Self::from_arc(start, length, capacity, arc)
    }

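    /// Creates a mutable slice from a buffer that provides its own metadata
    /// via [`BorrowMetadata`].
    ///
    /// # Safety
    ///
    /// Assumed contract: the caller must guarantee that the metadata borrowed
    /// from the buffer stays valid for as long as the slice is alive.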
    #[inline]
    pub unsafe fn with_borrowed_metadata<B: BufferMut<T> + BorrowMetadata>(buffer: B) -> Self {
        let (arc, start, length, capacity) = Arc::new_borrow_mut(buffer);
        Self::from_arc(start, length, capacity, arc)
    }

    #[allow(clippy::incompatible_msrv)]
    fn set_tail_flag(&mut self) {
        if self.length < self.capacity {
            self.arc_or_offset = self
                .arc_or_offset
                .map_addr(|addr| NonZero::new(addr.get() | Self::TAIL_FLAG).unwrap().into());
        }
    }

    fn spare_capacity(&self) -> usize {
        self.capacity - self.length
    }

    fn update_arc_spare_capacity(&self, arc: &Arc<T>, is_tail: bool) {
        if is_tail {
            unsafe { arc.set_spare_capacity(self.spare_capacity()) };
        }
    }

    fn new_vec(vec: Vec<T>) -> Self {
        let mut vec = ManuallyDrop::new(vec);
        let arc_or_offset = ptr::without_provenance_mut::<()>(VEC_FLAG);
        Self {
            start: NonNull::new(vec.as_mut_ptr()).unwrap(),
            length: vec.len(),
            capacity: vec.capacity(),
            arc_or_offset: NonNull::new(arc_or_offset).unwrap(),
        }
    }

    unsafe fn rebuild_vec(&self, offset: usize) -> Vec<T> {
        unsafe {
            Vec::from_raw_parts(
                self.start.as_ptr().sub(offset),
                offset + self.length,
                offset + self.capacity,
            )
        }
    }

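    /// Updates the slice start and length in place.
    ///
    /// Assumed contract: `start` must point inside the slice's allocation and
    /// `len` must not exceed the capacity remaining past it. Only the address
    /// of `start` is used; the provenance of the existing pointer is kept.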
    #[allow(clippy::incompatible_msrv)]
    pub(crate) unsafe fn set_start_len(&mut self, start: NonNull<T>, len: usize) {
        self.start = self.start.with_addr(start.addr());
        self.length = len;
    }

    pub(crate) fn from_arc(start: NonNull<T>, length: usize, capacity: usize, arc: Arc<T>) -> Self {
        let mut this = Self {
            start,
            length,
            capacity,
            arc_or_offset: arc.into_ptr(),
        };
        this.set_tail_flag();
        this
    }

    #[allow(clippy::incompatible_msrv)]
    #[inline(always)]
    fn inner(&self) -> Inner<T> {
        let arc_or_offset = self.arc_or_offset.addr().get();
        if arc_or_offset & VEC_FLAG != 0 {
            Inner::Vec {
                offset: arc_or_offset >> VEC_OFFSET_SHIFT,
            }
        } else {
            let masked_ptr = self.arc_or_offset.map_addr(|addr| {
                unsafe { NonZero::new_unchecked(addr.get() & !Self::TAIL_FLAG) }.into()
            });
            Inner::Arc {
                arc: ManuallyDrop::new(unsafe { Arc::from_ptr(masked_ptr) }),
                is_tail: arc_or_offset & Self::TAIL_FLAG != 0,
            }
        }
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.length
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub const fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.start.as_ptr(), self.length) }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.start.as_ptr(), self.length) }
    }

    #[inline]
    pub fn capacity(&self) -> usize {
        self.capacity
    }

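    /// Returns the spare capacity of the buffer as a slice of
    /// `MaybeUninit<T>`.
    ///
    /// # Safety
    ///
    /// Assumed contract: the returned spare capacity must not be used to
    /// alias memory owned by another handle sharing the same buffer.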
    #[inline]
    pub unsafe fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
        unsafe {
            slice::from_raw_parts_mut(
                self.start.as_ptr().add(self.length).cast(),
                self.spare_capacity(),
            )
        }
    }

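    /// Sets the length of the slice.
    ///
    /// # Safety
    ///
    /// `new_len` must not exceed the capacity, and the elements in
    /// `..new_len` must be initialized.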
    #[inline]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        self.length = new_len;
    }

    fn set_offset(&mut self, offset: usize) {
        let arc_or_offset =
            ptr::without_provenance_mut::<()>(VEC_FLAG | (offset << VEC_OFFSET_SHIFT));
        self.arc_or_offset = NonNull::new(arc_or_offset).unwrap();
    }

    #[allow(clippy::incompatible_msrv)]
    fn remove_tail_flag(&mut self) {
        self.arc_or_offset = self.arc_or_offset.map_addr(|addr| {
            unsafe { NonZero::new_unchecked(addr.get() & !Self::TAIL_FLAG) }.into()
        });
    }

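    /// Shortens the slice to `len` elements; a no-op if `len >= self.len()`.
    ///
    /// For a `Vec`-backed slice the truncated elements are dropped in place;
    /// for an `Arc`-backed slice of drop-needing elements, the capacity is
    /// cut instead so that the shared buffer drops them later.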
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        if len >= self.length {
            return;
        }
        if mem::needs_drop::<T>() {
            match self.inner() {
                Inner::Vec { .. } => unsafe {
                    ptr::drop_in_place(ptr::slice_from_raw_parts_mut(
                        self.start.as_ptr().add(len),
                        self.length - len,
                    ));
                },
                Inner::Arc { is_tail, .. } => {
                    if is_tail {
                        self.remove_tail_flag();
                    }
                    self.capacity = len;
                }
            }
        }
        self.length = len;
    }

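    /// Advances the start of the slice by `offset` elements, shrinking both
    /// length and capacity.
    ///
    /// # Panics
    ///
    /// Panics if `offset > self.len()`.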
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    pub fn advance(&mut self, offset: usize) {
        if offset > self.length {
            panic_out_of_range();
        }
        if let Inner::Vec { offset: prev_off } = self.inner() {
            self.set_offset(prev_off + offset);
        }
        self.start = unsafe { self.start.add(offset) };
        self.length -= offset;
        self.capacity -= offset;
    }

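    // Promotes a `Vec`-backed slice to an `Arc`-backed one so that it can be
    // shared: the new `Arc` starts with a reference count of 2, one for
    // `self` and one for the bitwise copy returned by `ptr::read`.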
    #[cold]
    unsafe fn clone_vec(&mut self, offset: usize) -> Self {
        let vec = unsafe { self.rebuild_vec(offset) };
        if vec.capacity() != 0 {
            let (arc, _, _, _) = Arc::new_mut(vec, (), 2);
            self.arc_or_offset = arc.into_ptr();
            self.set_tail_flag();
        }
        unsafe { ptr::read(self) }
    }

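    // Bitwise-copies `self` after incrementing the reference count: cloning
    // the `ManuallyDrop<Arc<T>>` bumps the count, and dropping the resulting
    // `ManuallyDrop` deliberately does not decrement it.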
    unsafe fn clone(&mut self) -> Self {
        match self.inner() {
            Inner::Vec { offset } => return unsafe { self.clone_vec(offset) },
            Inner::Arc { arc, .. } => {
                let _ = arc.clone();
            }
        };
        unsafe { ptr::read(self) }
    }

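    /// Splits the slice into two at the given index, returning the part in
    /// `at..`; `self` keeps `..at` and its capacity is truncated to `at`,
    /// while the spare capacity goes to the returned half.
    ///
    /// # Panics
    ///
    /// Panics if `at > self.capacity()`.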
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    #[must_use = "consider `ArcSliceMut::truncate` if you don't need the other half"]
    pub fn split_off(&mut self, at: usize) -> Self {
        if at > self.capacity {
            panic_out_of_range();
        }
        let mut clone = unsafe { self.clone() };
        clone.start = unsafe { clone.start.add(at) };
        clone.capacity -= at;
        self.remove_tail_flag();
        self.capacity = at;
        if at > self.length {
            clone.length = 0;
        } else {
            self.length = at;
            clone.length -= at;
        }
        clone
    }

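    /// Splits the slice into two at the given index, returning the part in
    /// `..at`; `self` keeps `at..` along with the spare capacity.
    ///
    /// # Panics
    ///
    /// Panics if `at > self.len()`.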
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    #[must_use = "consider `ArcSliceMut::advance` if you don't need the other half"]
    pub fn split_to(&mut self, at: usize) -> Self {
        if at > self.length {
            panic_out_of_range();
        }
        let mut clone = unsafe { self.clone() };
        clone.remove_tail_flag();
        clone.capacity = at;
        clone.length = at;
        self.start = unsafe { self.start.add(at) };
        self.capacity -= at;
        self.length -= at;
        clone
    }

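    /// Reunites two adjacent slices previously split from the same buffer.
    ///
    /// Succeeds only if `other` starts exactly where `self` ends and both
    /// share the same `Arc`; otherwise `other` is returned unchanged.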
    #[allow(clippy::incompatible_msrv)]
    #[inline]
    pub fn try_unsplit(&mut self, other: ArcSliceMut<T>) -> Result<(), ArcSliceMut<T>> {
        let end = unsafe { self.start.add(self.length) };
        let mut other_arc_or_offset = other.arc_or_offset.addr().get();
        if mem::needs_drop::<T>() {
            other_arc_or_offset &= !Self::TAIL_FLAG;
        }
        if end == other.start
            && matches!(self.inner(), Inner::Arc { .. })
            && self.arc_or_offset.addr().get() == other_arc_or_offset
        {
            debug_assert_eq!(self.length, self.capacity);
            self.arc_or_offset = other.arc_or_offset;
            self.length += other.length;
            self.capacity += other.capacity;
            return Ok(());
        }
        Err(other)
    }

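    /// Freezes the slice into an immutable [`ArcSlice`].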
    #[inline]
    pub fn freeze<L: Layout>(self) -> ArcSlice<T, L> {
        let this = ManuallyDrop::new(self);
        match this.inner() {
            Inner::Vec { offset, .. } => unsafe {
                ArcSlice::new_vec_with_offset(this.start, this.length, this.capacity, offset)
            },
            Inner::Arc { arc, is_tail } => {
                this.update_arc_spare_capacity(&arc, is_tail);
                unsafe { ArcSlice::from_arc(this.start, this.len(), ManuallyDrop::into_inner(arc)) }
            }
        }
    }

    #[allow(unstable_name_collisions)]
    unsafe fn shift_vec(&self, mut vec: Vec<T>) -> Vec<T> {
        unsafe {
            let offset = self.start.as_ptr().sub_ptr(vec.as_mut_ptr());
            vec.shift_left(offset, self.length)
        };
        vec
    }

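    /// Converts the slice into a `Vec<T>`.
    ///
    /// The underlying buffer is reused when this handle is its only owner;
    /// otherwise the contents are copied, which is why `T: Clone` is
    /// required.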
    #[inline]
    pub fn into_vec(self) -> Vec<T>
    where
        T: Clone,
    {
        let this = ManuallyDrop::new(self);
        match this.inner() {
            Inner::Vec { offset } => unsafe { this.shift_vec(this.rebuild_vec(offset)) },
            Inner::Arc { mut arc, is_tail } => unsafe {
                this.update_arc_spare_capacity(&arc, is_tail);
                let mut vec = MaybeUninit::<Vec<T>>::uninit();
                if !arc.take_buffer(this.length, NonNull::new(vec.as_mut_ptr()).unwrap()) {
                    let vec = this.as_slice().to_vec();
                    drop(ManuallyDrop::into_inner(arc));
                    return vec;
                }
                this.shift_vec(vec.assume_init())
            },
        }
    }

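    /// Returns a reference to the buffer metadata, if it has type `M`; unit
    /// metadata is always available for `Vec`-backed slices.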
    #[inline]
    pub fn get_metadata<M: Any>(&self) -> Option<&M> {
        match self.inner() {
            Inner::Arc { arc, .. } => arc.get_metadata(),
            _ if is!(M, ()) => Some(unit_metadata()),
            _ => None,
        }
    }

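    /// Attempts to take back the underlying buffer as `B`, returning `self`
    /// unchanged if the buffer type does not match or the buffer cannot be
    /// reclaimed, e.g. because it is shared.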
    #[inline]
    pub fn downcast_buffer<B: BufferMut<T>>(self) -> Result<B, Self> {
        let mut buffer = MaybeUninit::<B>::uninit();
        match self.inner() {
            Inner::Vec { offset } if is!(B, Vec<T>) => unsafe {
                let vec_ptr = buffer.as_mut_ptr().cast::<Vec<T>>();
                vec_ptr.write(self.shift_vec(self.rebuild_vec(offset)));
            },
            Inner::Arc { mut arc, is_tail } => unsafe {
                self.update_arc_spare_capacity(&arc, is_tail);
                if !arc.take_buffer(self.length, NonNull::from(&mut buffer).cast::<B>()) {
                    return Err(self);
                }
                if is!(B, Vec<T>) {
                    let vec_ptr = buffer.as_mut_ptr().cast::<Vec<T>>();
                    vec_ptr.write(self.shift_vec(vec_ptr.read()));
                }
            },
            _ => return Err(self),
        }
        mem::forget(self);
        Ok(unsafe { buffer.assume_init() })
    }

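    /// Slow path shared by [`try_reserve`](Self::try_reserve) and
    /// [`try_reclaim`](Self::try_reclaim): reclaims spare room in the current
    /// allocation, or grows it when `allocate` is true.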
    #[cold]
    pub fn try_reserve_inner(
        &mut self,
        additional: usize,
        allocate: bool,
    ) -> Result<(), TryReserveError> {
        match self.inner() {
            Inner::Vec { offset } => {
                let mut vec = unsafe { ManuallyDrop::new(self.rebuild_vec(offset)) };
                if unsafe { vec.try_reclaim(offset, self.length, additional) } {
                    self.set_offset(0);
                    self.start = NonNull::new(vec.as_mut_ptr()).unwrap();
                    self.capacity = vec.capacity();
                    return Ok(());
                } else if !allocate {
                    return Err(TryReserveError::Unsupported);
                }
                vec.reserve(additional);
                let new_start = unsafe { vec.as_mut_ptr().add(offset) };
                self.start = NonNull::new(new_start).unwrap();
                self.capacity = vec.capacity() - offset;
            }
            Inner::Arc { mut arc, is_tail } => {
                self.update_arc_spare_capacity(&arc, is_tail);
                let (res, new_start) =
                    unsafe { arc.try_reserve(additional, allocate, self.start, self.length) };
                self.start = new_start;
                match res {
                    Ok(capa) => self.capacity = capa,
                    Err(err) => return Err(err),
                }
            }
        }
        Ok(())
    }

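    /// Tries to make room for at least `additional` more elements without
    /// allocating, returning whether it succeeded.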
    #[inline]
    pub fn try_reclaim(&mut self, additional: usize) -> bool {
        if additional <= self.spare_capacity() {
            return true;
        }
        self.try_reserve_inner(additional, false).is_ok()
    }

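    /// Reserves capacity for at least `additional` more elements, allocating
    /// if needed.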
    #[inline]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        if additional <= self.spare_capacity() {
            return Ok(());
        }
        self.try_reserve_inner(additional, true)
    }

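    /// Appends the elements of `slice`, reserving capacity first.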
    #[inline]
    pub fn try_extend_from_slice(&mut self, slice: &[T]) -> Result<(), TryReserveError>
    where
        T: Copy,
    {
        self.try_reserve(slice.len())?;
        unsafe {
            let end = self.spare_capacity_mut().as_mut_ptr().cast();
            ptr::copy_nonoverlapping(slice.as_ptr(), end, slice.len());
            self.set_len(self.length + slice.len());
        }
        Ok(())
    }
}

unsafe impl<T: Send + Sync + 'static> Send for ArcSliceMut<T> {}
unsafe impl<T: Send + Sync + 'static> Sync for ArcSliceMut<T> {}

impl<T: Send + Sync + 'static> Drop for ArcSliceMut<T> {
    #[inline]
    fn drop(&mut self) {
        match self.inner() {
            Inner::Vec { offset } => drop(unsafe { self.rebuild_vec(offset) }),
            Inner::Arc { arc, is_tail } => {
                self.update_arc_spare_capacity(&arc, is_tail);
                drop(ManuallyDrop::into_inner(arc));
            }
        }
    }
}

impl<T: Send + Sync + 'static> Deref for ArcSliceMut<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T: Send + Sync + 'static> DerefMut for ArcSliceMut<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T: Send + Sync + 'static> AsRef<[T]> for ArcSliceMut<T> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self
    }
}

impl<T: Send + Sync + 'static> AsMut<[T]> for ArcSliceMut<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T: Send + Sync + 'static> Borrow<[T]> for ArcSliceMut<T> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self
    }
}

impl<T: Send + Sync + 'static> BorrowMut<[T]> for ArcSliceMut<T> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T: Send + Sync + 'static> Default for ArcSliceMut<T> {
    #[inline]
    fn default() -> Self {
        Self::new_vec(Vec::new())
    }
}

impl<T: fmt::Debug + Send + Sync + 'static> fmt::Debug for ArcSliceMut<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        debug_slice(self, f)
    }
}

impl fmt::LowerHex for ArcSliceMut<u8> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for &b in self.as_slice() {
            write!(f, "{:02x}", b)?;
        }
        Ok(())
    }
}

impl fmt::UpperHex for ArcSliceMut<u8> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for &b in self.as_slice() {
            write!(f, "{:02X}", b)?;
        }
        Ok(())
    }
}

impl<T: PartialEq + Send + Sync + 'static> PartialEq for ArcSliceMut<T> {
    fn eq(&self, other: &ArcSliceMut<T>) -> bool {
        self.as_slice() == other.as_slice()
    }
}

impl<T: Eq + Send + Sync + 'static> Eq for ArcSliceMut<T> {}

impl<T: PartialOrd + Send + Sync + 'static> PartialOrd for ArcSliceMut<T> {
    fn partial_cmp(&self, other: &ArcSliceMut<T>) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<T: Ord + Send + Sync + 'static> Ord for ArcSliceMut<T> {
    fn cmp(&self, other: &ArcSliceMut<T>) -> cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T: PartialEq + Send + Sync + 'static> PartialEq<[T]> for ArcSliceMut<T> {
    fn eq(&self, other: &[T]) -> bool {
        self.as_slice() == other
    }
}

impl<T: PartialEq + Send + Sync + 'static> PartialEq<ArcSliceMut<T>> for [T] {
    fn eq(&self, other: &ArcSliceMut<T>) -> bool {
        *other == *self
    }
}

impl<T: PartialEq + Send + Sync + 'static, const N: usize> PartialEq<[T; N]> for ArcSliceMut<T> {
    fn eq(&self, other: &[T; N]) -> bool {
        self.as_slice() == other
    }
}

impl<T: PartialEq + Send + Sync + 'static, const N: usize> PartialEq<ArcSliceMut<T>> for [T; N] {
    fn eq(&self, other: &ArcSliceMut<T>) -> bool {
        *other == *self
    }
}

impl<T: PartialEq + Send + Sync + 'static> PartialEq<Vec<T>> for ArcSliceMut<T> {
    fn eq(&self, other: &Vec<T>) -> bool {
        *self == other[..]
    }
}

impl<T: PartialEq + Send + Sync + 'static> PartialEq<ArcSliceMut<T>> for Vec<T> {
    fn eq(&self, other: &ArcSliceMut<T>) -> bool {
        *other == *self
    }
}

impl<T: PartialEq + Send + Sync + 'static> PartialEq<ArcSliceMut<T>> for &[T] {
    fn eq(&self, other: &ArcSliceMut<T>) -> bool {
        *other == *self
    }
}

impl<'a, T: PartialEq + Send + Sync + 'static, O: ?Sized> PartialEq<&'a O> for ArcSliceMut<T>
where
    ArcSliceMut<T>: PartialEq<O>,
{
    fn eq(&self, other: &&'a O) -> bool {
        *self == **other
    }
}

impl<T: Send + Sync + 'static> From<Vec<T>> for ArcSliceMut<T> {
    #[inline]
    fn from(value: Vec<T>) -> Self {
        Self::new_vec(value)
    }
}

impl<T: Send + Sync + 'static, const N: usize> From<[T; N]> for ArcSliceMut<T> {
    #[inline]
    fn from(value: [T; N]) -> Self {
        Self::new(value)
    }
}

impl<T: Clone + Send + Sync + 'static> From<ArcSliceMut<T>> for Vec<T> {
    #[inline]
    fn from(value: ArcSliceMut<T>) -> Self {
        value.into_vec()
    }
}

impl fmt::Write for ArcSliceMut<u8> {
    #[inline]
    fn write_str(&mut self, s: &str) -> fmt::Result {
        if self.spare_capacity() >= s.len() {
            self.try_extend_from_slice(s.as_bytes()).unwrap();
            Ok(())
        } else {
            Err(fmt::Error)
        }
    }

    #[inline]
    fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> fmt::Result {
        fmt::write(self, args)
    }
}