1use core::fmt::Debug;
2use core::ops::{Deref, DerefMut, Index, IndexMut};
3use core::ptr::NonNull;
4use core::{mem, ptr};
5use core::ops::RangeBounds;
6
7use crate::alloc::{AllocError, Allocator, Global};
8use crate::drain::Drain;
9use crate::raw::{
10 self, buffer_layout, AtomicRefCount, BufferSize, Header, HeaderBuffer, RefCount, VecHeader, move_data,
11};
12use crate::shared::{AtomicSharedVector, SharedVector};
13use crate::splice::Splice;
14use crate::{grow_amortized, DefaultRefCount};
15
/// A low-level growable buffer: a data pointer plus a `{len, cap}` header,
/// with no allocator stored inline. Callers must pass the same allocator to
/// every method that allocates or frees, and must call `deallocate` manually.
pub struct RawVector<T> {
    // Pointer to the first element; dangling when `cap == 0`.
    pub(crate) data: NonNull<T>,
    // Length and capacity (stored as `BufferSize` values).
    pub(crate) header: VecHeader,
}
41
42impl<T> RawVector<T> {
43 pub fn new() -> Self {
45 RawVector { data: NonNull::dangling(), header: VecHeader { len: 0, cap: 0 } }
46 }
47
    /// Allocates a buffer able to hold `cap` elements (no allocation when `cap == 0`).
    pub fn try_with_capacity<A: Allocator>(allocator: &A, cap: usize) -> Result<RawVector<T>, AllocError> {
        if cap == 0 {
            return Ok(RawVector::new());
        }

        unsafe {
            // The allocation routine may round the capacity up; record the actual one.
            let (base_ptr, cap) = raw::allocate_header_buffer::<T, A>(cap, allocator)?;
            let data = NonNull::new_unchecked(raw::data_ptr::<raw::Header<DefaultRefCount, A>, T>(
                base_ptr.cast(),
            ));
            Ok(RawVector {
                data,
                header: VecHeader { cap: cap as BufferSize, len: 0 },
            })
        }
    }

    /// Allocates a buffer and clones the contents of `data` into it.
    pub fn try_from_slice<A: Allocator>(allocator: &A, data: &[T]) -> Result<Self, AllocError>
    where
        T: Clone,
    {
        let mut v = Self::try_with_capacity(allocator, data.len())?;
        unsafe {
            // Capacity was just reserved, so this cannot reallocate.
            v.extend_from_slice(allocator, data);
        }

        Ok(v)
    }
79
80 pub fn try_from_elem<A: Allocator>(allocator: &A, elem: T, n: usize) -> Result<Self, AllocError>
82 where
83 T: Clone,
84 {
85 if n == 0 {
86 return Ok(Self::new());
87 }
88
89 let mut v = Self::try_with_capacity(allocator, n)?;
90 unsafe {
91 for _ in 0..(n - 1) {
92 v.push(allocator, elem.clone())
93 }
94
95 v.push(allocator, elem);
96 }
97
98 Ok(v)
99 }
100
    /// Drops all elements and frees the buffer, resetting `self` to the empty state.
    /// No-op when nothing was ever allocated.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn deallocate<A: Allocator>(&mut self, allocator: &A) {
        if self.header.cap == 0 {
            return;
        }

        self.clear();

        self.deallocate_buffer(allocator);

        self.data = NonNull::dangling();
        self.header.cap = 0;
        self.header.len = 0;
    }
121
    /// Returns `true` if the vector holds no elements.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.header.len == 0
    }

    /// Returns the number of elements.
    #[inline]
    pub fn len(&self) -> usize {
        self.header.len as usize
    }

    /// Returns how many elements the buffer can hold without reallocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.header.cap as usize
    }

    /// Returns how many more elements can be pushed without reallocating.
    #[inline]
    pub fn remaining_capacity(&self) -> usize {
        (self.header.cap - self.header.len) as usize
    }

    /// Raw pointer to the first element (dangling when `cap == 0`).
    #[inline]
    fn data_ptr(&self) -> *mut T {
        self.data.as_ptr()
    }

    /// Borrows the elements as a slice.
    #[inline]
    pub fn as_slice(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.data_ptr(), self.len()) }
    }

    /// Borrows the elements as a mutable slice.
    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { core::slice::from_raw_parts_mut(self.data_ptr(), self.len()) }
    }

    /// Drops all elements, keeping the allocated buffer.
    pub fn clear(&mut self) {
        unsafe {
            raw::clear(self.data_ptr(), &mut self.header)
        }
    }

    /// Pointer to the start of the allocation (the header that precedes the
    /// data). Only meaningful when `cap > 0`; the allocator parameter is only
    /// used to name the header type.
    unsafe fn base_ptr<A: Allocator>(&self, _allocator: &A) -> NonNull<u8> {
        debug_assert!(self.header.cap > 0);
        raw::header_from_data_ptr::<Header<DefaultRefCount, A>, T>(self.data).cast()
    }
172
    /// Appends `val`, growing the buffer if it is full.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    #[inline]
    pub unsafe fn push<A: Allocator>(&mut self, allocator: &A, val: T) {
        if self.header.len == self.header.cap {
            self.try_realloc_additional(allocator, 1).unwrap();
        }

        raw::push_assuming_capacity(self.data_ptr(), &mut self.header, val);
    }

    /// Appends `val` without ever reallocating; hands it back as `Err` when
    /// the buffer is full.
    #[inline]
    pub fn push_within_capacity(&mut self, val: T) -> Result<(), T> {
        if self.header.len == self.header.cap {
            return Err(val);
        }

        unsafe {
            let dst = self.data_ptr().add(self.header.len as usize);
            self.header.len += 1;
            ptr::write(dst, val);
        }

        Ok(())
    }

    /// Removes and returns the last element, or `None` if empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        unsafe {
            raw::pop(self.data_ptr(), &mut self.header)
        }
    }
218
    /// Removes and returns the element at `index`, shifting everything after
    /// it one slot to the left. O(n). Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        #[track_caller]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("removal index (is {index}) should be < len (is {len})");
        }

        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }
        unsafe {
            let ret;
            {
                let ptr = self.as_mut_ptr().add(index);
                // Read out the removed element, then shift the tail left over its slot.
                ret = ptr::read(ptr);

                ptr::copy(ptr.add(1), ptr, len - index - 1);
            }
            self.header.len = len as u32 - 1;
            ret
        }
    }
255
256 #[inline]
264 pub fn swap_remove(&mut self, idx: usize) -> T {
265 let len = self.len();
266 assert!(idx < len);
267
268 unsafe {
269 let ptr = self.data_ptr().add(idx);
270 let item = ptr::read(ptr);
271
272 let last_idx = len - 1;
273 if idx != last_idx {
274 let last_ptr = self.data_ptr().add(last_idx);
275 ptr::write(ptr, ptr::read(last_ptr));
276 }
277
278 self.header.len -= 1;
279
280 item
281 }
282 }
283
    /// Inserts `element` at `index`, shifting later elements one slot to the
    /// right. O(n). Panics if `index > len`.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn insert<A: Allocator>(&mut self, allocator: &A, index: usize, element: T) {
        #[cold]
        #[inline(never)]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("insertion index (is {index}) should be <= len (is {len})");
        }

        unsafe {
            if self.header.len == self.header.cap {
                self.try_reserve(allocator, 1).unwrap();
            }

            let len = self.len();

            {
                let p = self.as_mut_ptr().add(index);
                if index < len {
                    // Shift everything from `index` onwards one slot to the right.
                    ptr::copy(p, p.add(1), len - index);
                } else if index == len {
                    // Appending at the end: nothing to shift.
                } else {
                    assert_failed(index, len);
                }
                ptr::write(p, element);
            }
            self.header.len += 1;
        }
    }
325
    /// Clones every element of `slice` onto the end of the vector.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn extend_from_slice<A: Allocator>(&mut self, allocator: &A, slice: &[T])
    where
        T: Clone,
    {
        self.try_reserve(allocator, slice.len()).unwrap();
        unsafe {
            raw::extend_from_slice_assuming_capacity(self.data_ptr(), &mut self.header, slice);
        }
    }

    /// Moves all elements of `other` onto the end of `self`, leaving `other` empty.
    ///
    /// NOTE(review): the `T: Clone` bound looks unnecessary — `move_data`
    /// appears to move elements rather than clone them; confirm and consider
    /// relaxing the bound.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn append<A: Allocator>(&mut self, allocator: &A, other: &mut Self)
    where
        T: Clone,
    {
        if other.is_empty() {
            return;
        }

        self.try_reserve(allocator, other.len()).unwrap();

        unsafe {
            move_data(other.data_ptr(), &mut other.header, self.data_ptr(), &mut self.header);
        }
    }

    /// Appends every item of `data`, reserving up-front from the iterator's
    /// size hint.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn extend<A: Allocator>(&mut self, allocator: &A, data: impl IntoIterator<Item = T>) {
        let mut iter = data.into_iter();
        let (min, max) = iter.size_hint();
        self.try_reserve(allocator, max.unwrap_or(min)).unwrap();
        unsafe {
            // Fast path: fill the capacity just reserved without per-item checks.
            self.extend_within_capacity(&mut iter);

            // Slow path for iterators that under-reported their length.
            for item in iter {
                self.push(allocator, item);
            }
        }
    }

    /// Pulls items from `iter` until the buffer is full or the iterator ends.
    unsafe fn extend_within_capacity(&mut self, iter: &mut impl Iterator<Item = T>) {
        let n = self.remaining_capacity() as BufferSize;
        if n == 0 {
            return;
        }

        let mut ptr = self.data_ptr().add(self.len());
        let mut count = 0;

        unsafe {
            for item in iter {
                ptr::write(ptr, item);
                ptr = ptr.add(1);
                count += 1;
                if count == n {
                    break;
                }
            }
            // `len` is only updated once all writes succeeded; if the iterator
            // panics, written items are leaked rather than double-dropped.
            self.header.len += count;
        }
    }
400
    /// Returns a copy of this vector with its own freshly allocated buffer.
    pub fn clone_buffer<A: Allocator>(&self, allocator: &A) -> Self
    where
        T: Clone,
    {
        self.clone_buffer_with_capacity(allocator, self.len())
    }

    /// Returns a copy of this vector with capacity `max(cap, self.len())`.
    /// Panics on allocation failure.
    pub fn clone_buffer_with_capacity<A: Allocator>(&self, allocator: &A, cap: usize) -> Self
    where
        T: Clone,
    {
        let mut clone =
            Self::try_with_capacity(allocator, cap.max(self.len())).unwrap();

        unsafe {
            raw::extend_from_slice_assuming_capacity(clone.data_ptr(), &mut clone.header, self.as_slice());
        }

        clone
    }
428
    /// Grows the buffer using the amortized growth policy so that at least
    /// `additional` more elements fit.
    #[cold]
    unsafe fn try_realloc_additional<A: Allocator>(&mut self, allocator: &A, additional: usize) -> Result<(), AllocError> {
        let new_cap = grow_amortized(self.len(), additional);
        if new_cap < self.len() {
            // The computed capacity wrapped around: treat as allocation failure.
            return Err(AllocError);
        }

        self.try_realloc_with_capacity(allocator, new_cap)
    }

    /// Drops the elements past `new_len`. No-op if `new_len >= len`.
    /// Does not shrink the allocated buffer.
    pub fn truncate(&mut self, new_len: usize) {
        let old_len = self.header.len as usize;
        if old_len <= new_len {
            return;
        }

        unsafe {
            // Drop the tail elements in place, front to back.
            let mut elt = self.data_ptr().add(new_len);
            let end = self.data_ptr().add(old_len);
            while elt < end {
                ptr::drop_in_place(elt);
                elt = elt.add(1)
            }
        }

        self.header.len = new_len as u32;
    }
457
458 #[cold]
459 unsafe fn try_realloc_with_capacity<A: Allocator>(&mut self, allocator: &A, new_cap: usize) -> Result<(), AllocError> {
460 type R = DefaultRefCount;
461
462 if new_cap == 0 {
463 self.deallocate(allocator);
464 return Ok(());
465 }
466
467 let new_layout = buffer_layout::<Header<R, A>, T>(new_cap).unwrap();
468
469 let old_len = self.len();
470 if old_len > new_cap {
471 self.truncate(new_cap);
472 }
473
474 let new_alloc = if self.header.cap == 0 {
475 allocator.allocate(new_layout)
476 } else {
477 let old_cap = self.capacity();
478 let old_ptr = self.base_ptr(allocator);
479 let old_layout = buffer_layout::<Header<R, A>, T>(old_cap).unwrap();
480 let new_layout = buffer_layout::<Header<R, A>, T>(new_cap).unwrap();
481
482 if new_layout.size() >= old_layout.size() {
483 allocator.grow(old_ptr, old_layout, new_layout)
484 } else {
485 allocator.shrink(old_ptr, old_layout, new_layout)
486 }
487 };
488
489 self.header.len = self.header.len.min(new_cap as u32);
493
494 let new_alloc = new_alloc?;
496
497 let new_data_ptr = crate::raw::data_ptr::<Header<R, A>, T>(new_alloc.cast());
500 self.data = NonNull::new_unchecked(new_data_ptr);
501 self.header.cap = new_cap as u32;
502
503 Ok(())
504 }
505
506 #[cold]
524 pub unsafe fn try_reallocate_in<OldA: Allocator, NewA: Allocator>(
525 &mut self,
526 old_allocator: &OldA,
527 new_allocator: &NewA,
528 new_cap: usize,
529 ) -> Result<(), AllocError> {
530 type R = DefaultRefCount;
531
532 let new_layout = buffer_layout::<Header<R, NewA>, T>(new_cap).unwrap();
533
534 if new_cap == 0 {
535 self.deallocate(old_allocator);
536 return Ok(());
537 }
538
539 let new_alloc = if new_cap > 0 {
540 Some(new_allocator.allocate(new_layout)?)
541 } else {
542 None
543 };
544
545 let old_len = self.len();
546 if old_len > new_cap {
547 self.truncate(new_cap);
548 }
549
550 let old_cap = self.capacity();
551
552 let old_base_ptr = if old_cap > 0 {
553 Some(self.base_ptr(old_allocator))
554 } else {
555 None
556 };
557
558 self.data = if let Some(new_alloc) = new_alloc {
559 let new_data_ptr = crate::raw::data_ptr::<Header<R, NewA>, T>(new_alloc.cast());
560
561 let copy_size = old_len.min(new_cap);
562 if copy_size > 0 {
563 let old_data_ptr = self.data_ptr();
564 ptr::copy_nonoverlapping(old_data_ptr, new_data_ptr, copy_size);
565 }
566
567 NonNull::new_unchecked(new_data_ptr)
568 } else {
569 NonNull::dangling()
570 };
571
572 self.header.cap = new_cap as u32;
573 self.header.len = self.header.len.min(new_cap as u32);
574
575 if let Some(old_base_ptr) = old_base_ptr {
576 let old_layout = buffer_layout::<Header<R, OldA>, T>(old_cap).unwrap();
577 old_allocator.deallocate(old_base_ptr, old_layout);
578 }
579
580 Ok(())
581 }
582
    /// Frees the buffer without dropping elements, resetting to the empty
    /// state. The caller must have dropped or moved the elements out first.
    unsafe fn deallocate_buffer<A: Allocator>(&mut self, allocator: &A) {
        let layout = buffer_layout::<Header<DefaultRefCount, A>, T>(self.capacity()).unwrap();
        let ptr = self.base_ptr(allocator);

        allocator.deallocate(ptr, layout);

        self.header.cap = 0;
        self.header.len = 0;
        self.data = NonNull::dangling();
    }

    /// Ensures at least `additional` more elements fit without reallocating,
    /// growing with the amortized policy if needed.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    #[inline]
    pub unsafe fn try_reserve<A: Allocator>(&mut self, allocator: &A, additional: usize) -> Result<(), AllocError> {
        if self.remaining_capacity() < additional {
            self.try_realloc_additional(allocator, additional)?;
        }

        Ok(())
    }
608
609 pub unsafe fn try_reserve_exact<A: Allocator>(&mut self, allocator: &A, additional: usize) -> Result<(), AllocError> {
623 if self.remaining_capacity() >= additional {
624 return Ok(());
625 }
626
627 self.try_realloc_with_capacity(allocator, self.len() + additional)
628 }
629
    /// Shrinks the buffer to `max(min_capacity, len)` when that is smaller
    /// than the current capacity; otherwise does nothing.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn shrink_to<A: Allocator>(&mut self, allocator: &A, min_capacity: usize)
    {
        let min_capacity = min_capacity.max(self.len());
        if self.capacity() <= min_capacity {
            return;
        }

        self.try_realloc_with_capacity(allocator, min_capacity).unwrap();
    }

    /// Shrinks the buffer so that capacity equals length.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn shrink_to_fit<A: Allocator>(&mut self, allocator: &A)
    {
        self.shrink_to(allocator, self.len())
    }
657
658 pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
677 where
678 R: RangeBounds<usize>,
679 {
680 use core::ops::Bound::*;
691 let len = self.len();
692 let end = match range.end_bound() {
693 Included(n) => *n + 1,
694 Excluded(n) => *n,
695 Unbounded => len
696 };
697 let start = match range.start_bound() {
698 Included(n) => *n,
699 Excluded(n) => *n+1,
700 Unbounded => 0
701 };
702 assert!(end <= len);
703 assert!(start <= end);
704
705 unsafe {
706 self.header.len = start as u32;
708 let range_slice = core::slice::from_raw_parts(self.data.as_ptr().add(start), end - start);
709 Drain {
710 tail_start: end,
711 tail_len: len - end,
712 iter: range_slice.iter(),
713 vec: NonNull::from(self),
714 }
715 }
716 }
717
    /// Replaces the given range with the items of `replace_with`, yielding the
    /// removed elements. The replacement happens when the returned `Splice`
    /// is dropped.
    ///
    /// # Safety
    ///
    /// `allocator` must be the allocator this vector's buffer was allocated from.
    pub unsafe fn splice<'l, A, R, I>(
        &'l mut self,
        allocator: &'l A,
        range: R,
        replace_with: I
    ) -> Splice<'l, <I as IntoIterator>::IntoIter, A>
    where
        A: Allocator,
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
    {
        Splice {
            drain: self.drain(range),
            replace_with: replace_with.into_iter(),
            allocator,
        }
    }

    /// Keeps only the elements for which `f` returns `true`, preserving order.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.retain_mut(|elem| f(elem));
    }
771
    /// Keeps only the elements for which `f` returns `true`, passing them by
    /// mutable reference. Preserves order; O(n), no extra allocation.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let original_len = self.len();
        // Set `len` to 0 while processing so that a panic in `f` cannot expose
        // or double-drop elements in an inconsistent region; the guard below
        // restores the correct length on both normal exit and unwind.
        self.header.len = 0;

        // On drop, shifts the unprocessed tail down over the holes left by
        // deleted elements and restores `len`.
        struct BackshiftOnDrop<'a, T> {
            v: &'a mut RawVector<T>,
            processed_len: usize,
            deleted_cnt: usize,
            original_len: usize,
        }

        impl<T> Drop for BackshiftOnDrop<'_, T> {
            fn drop(&mut self) {
                if self.deleted_cnt > 0 {
                    unsafe {
                        ptr::copy(
                            self.v.as_ptr().add(self.processed_len),
                            self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt),
                            self.original_len - self.processed_len,
                        );
                    }
                }
                self.v.header.len = (self.original_len - self.deleted_cnt) as u32;
            }
        }

        let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len };

        // Two phases: while nothing has been deleted (`DELETED == false`)
        // retained elements stay in place, so the copy can be skipped.
        fn process_loop<F, T, const DELETED: bool>(
            original_len: usize,
            f: &mut F,
            g: &mut BackshiftOnDrop<'_, T>,
        ) where
            F: FnMut(&mut T) -> bool,
        {
            while g.processed_len != original_len {
                let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) };
                if !f(cur) {
                    // Advance the counters before dropping so the guard does
                    // not touch `cur` again if the drop panics.
                    g.processed_len += 1;
                    g.deleted_cnt += 1;
                    unsafe { ptr::drop_in_place(cur) };
                    if DELETED {
                        continue;
                    } else {
                        break;
                    }
                }
                if DELETED {
                    unsafe {
                        // Shift the retained element left into the hole.
                        let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt);
                        ptr::copy_nonoverlapping(cur, hole_slot, 1);
                    }
                }
                g.processed_len += 1;
            }
        }

        // Phase 1: scan until the first deletion.
        process_loop::<F, T, false>(original_len, &mut f, &mut g);

        // Phase 2: continue, back-shifting every retained element.
        process_loop::<F, T, true>(original_len, &mut f, &mut g);

        // The guard's Drop restores the final length.
        drop(g);
    }
867
868 pub fn take(&mut self) -> Self {
871 mem::replace(self, RawVector::new())
872 }
873}
874
// Element-wise equality with another `RawVector`.
impl<T: PartialEq<T>> PartialEq<RawVector<T>> for RawVector<T> {
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}

// Element-wise equality with a slice.
impl<T: PartialEq<T>> PartialEq<&[T]> for RawVector<T> {
    fn eq(&self, other: &&[T]) -> bool {
        self.as_slice() == *other
    }
}

impl<T: Eq> Eq for RawVector<T> {}

impl<T> AsRef<[T]> for RawVector<T> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> AsMut<[T]> for RawVector<T> {
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

// The default vector is empty and owns no allocation.
impl<T> Default for RawVector<T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<'a, T> IntoIterator for &'a RawVector<T> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> core::slice::Iter<'a, T> {
        self.as_slice().iter()
    }
}

impl<'a, T> IntoIterator for &'a mut RawVector<T> {
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    fn into_iter(self) -> core::slice::IterMut<'a, T> {
        self.as_mut_slice().iter_mut()
    }
}

// Indexing delegates to the slice, supporting all standard index types.
impl<T, I> Index<I> for RawVector<T>
where
    I: core::slice::SliceIndex<[T]>,
{
    type Output = <I as core::slice::SliceIndex<[T]>>::Output;
    fn index(&self, index: I) -> &Self::Output {
        self.as_slice().index(index)
    }
}

impl<T, I> IndexMut<I> for RawVector<T>
where
    I: core::slice::SliceIndex<[T]>,
{
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        self.as_mut_slice().index_mut(index)
    }
}

// Deref to a slice exposes all non-mutating slice methods.
impl<T> Deref for RawVector<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> DerefMut for RawVector<T> {
    fn deref_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

// Debug-formats like a slice.
impl<T: Debug> Debug for RawVector<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
        self.as_slice().fmt(f)
    }
}

// Hashes like a slice, so a vector and its slice hash identically.
impl<T: core::hash::Hash> core::hash::Hash for RawVector<T> {
    fn hash<H>(&self, state: &mut H) where H: core::hash::Hasher {
        self.as_slice().hash(state)
    }
}
968
969
/// A growable, uniquely owned vector (similar to `std::vec::Vec`) that stores
/// its allocator inline and can be converted into a shared, reference-counted
/// vector without copying its elements.
pub struct Vector<T, A: Allocator = Global> {
    // The raw buffer; all element operations are delegated to it.
    pub(crate) raw: RawVector<T>,
    // The allocator every buffer operation must go through.
    pub(crate) allocator: A,
}
994
impl<T> Vector<T, Global> {
    /// Creates an empty vector using the global allocator (does not allocate).
    pub fn new() -> Vector<T, Global> {
        Vector {
            raw: RawVector::new(),
            allocator: Global,
        }
    }

    /// Creates a vector with at least `cap` capacity.
    /// Panics on allocation failure.
    pub fn with_capacity(cap: usize) -> Vector<T, Global> {
        Self::try_with_capacity(cap).unwrap()
    }

    /// Fallible version of `with_capacity`.
    pub fn try_with_capacity(cap: usize) -> Result<Vector<T, Global>, AllocError> {
        Vector::try_with_capacity_in(cap, Global)
    }

    /// Creates a vector by cloning the contents of `data`.
    /// Panics on allocation failure.
    pub fn from_slice(data: &[T]) -> Self
    where
        T: Clone,
    {
        Vector { raw: RawVector::try_from_slice(&Global, data).unwrap(), allocator: Global }
    }

    /// Creates a vector holding `n` clones of `elem`.
    /// Panics on allocation failure.
    pub fn from_elem(elem: T, n: usize) -> Vector<T, Global>
    where
        T: Clone,
    {
        Vector { raw: RawVector::try_from_elem(&Global, elem, n).unwrap(), allocator: Global }
    }
}
1037
1038impl<T, A: Allocator> Vector<T, A> {
    /// Creates an empty vector using `allocator`.
    pub fn new_in(allocator: A) -> Self {
        Self::try_with_capacity_in(0, allocator).unwrap()
    }

    /// Creates a vector with at least `cap` capacity in `allocator`.
    /// Panics on allocation failure.
    pub fn with_capacity_in(cap: usize, allocator: A) -> Self {
        Self::try_with_capacity_in(cap, allocator).unwrap()
    }

    /// Fallible version of `with_capacity_in`.
    pub fn try_with_capacity_in(cap: usize, allocator: A) -> Result<Vector<T, A>, AllocError> {
        let raw = RawVector::try_with_capacity(&allocator, cap)?;

        Ok(Vector { raw, allocator })
    }

    /// Moves the contents into a new buffer of capacity `new_cap` allocated
    /// from `new_allocator`, truncating to `new_cap` elements if needed, and
    /// returns the resulting vector. `self` is left empty.
    pub fn try_reallocate_in<NewA: Allocator>(&mut self, new_allocator: NewA, new_cap: usize) -> Result<Vector<T, NewA>, AllocError> {
        unsafe {
            self.raw.try_reallocate_in(&self.allocator, &new_allocator, new_cap)?;
        }

        Ok(Vector {
            raw: mem::take(&mut self.raw),
            allocator: new_allocator,
        })
    }
1071
    /// Returns `true` if the vector holds no elements.
    #[inline(always)]
    pub fn is_empty(&self) -> bool {
        self.raw.is_empty()
    }

    /// Returns the number of elements.
    #[inline(always)]
    pub fn len(&self) -> usize {
        self.raw.len()
    }

    /// Returns how many elements fit without reallocating.
    #[inline(always)]
    pub fn capacity(&self) -> usize {
        self.raw.capacity()
    }

    /// Returns how many more elements can be pushed without reallocating.
    #[inline(always)]
    pub fn remaining_capacity(&self) -> usize {
        self.raw.remaining_capacity()
    }

    /// Borrows the allocator this vector allocates from.
    #[inline(always)]
    pub fn allocator(&self) -> &A {
        &self.allocator
    }

    /// Borrows the elements as a slice.
    #[inline(always)]
    pub fn as_slice(&self) -> &[T] {
        self.raw.as_slice()
    }

    /// Borrows the elements as a mutable slice.
    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        self.raw.as_mut_slice()
    }
1111
1112 pub fn clear(&mut self) {
1114 unsafe {
1115 raw::clear(self.raw.data_ptr(), &mut self.raw.header)
1116 }
1117 }
1118
    /// Reinterprets this vector's buffer as a ref-counted `HeaderBuffer`,
    /// writing a fresh header (refcount 1) and moving the allocator into it.
    /// Requires `cap != 0`; consumes `self` without running its destructor.
    unsafe fn into_header_buffer<R>(mut self) -> HeaderBuffer<T, R, A>
    where
        R: RefCount,
    {
        debug_assert!(self.raw.header.cap != 0);
        unsafe {
            let mut header = raw::header_from_data_ptr(self.raw.data);

            *header.as_mut() = raw::Header {
                vec: VecHeader {
                    len: self.raw.header.len,
                    cap: self.raw.header.cap,
                },
                ref_count: R::new(1),
                // Move the allocator out of `self`; the `mem::forget` below
                // prevents it from being dropped a second time.
                allocator: ptr::read(&mut self.allocator),
            };

            mem::forget(self);

            HeaderBuffer::from_raw(header)
        }
    }

    /// Converts into a non-atomically reference-counted shared vector without
    /// copying the elements.
    #[inline]
    pub fn into_shared(self) -> SharedVector<T, A>
    where
        A: Allocator + Clone,
    {
        if self.raw.header.cap == 0 {
            // No buffer to hand over: build an empty shared vector instead.
            return SharedVector::try_with_capacity_in(0, self.allocator.clone()).unwrap();
        }
        unsafe {
            let inner = self.into_header_buffer::<DefaultRefCount>();
            SharedVector { inner }
        }
    }

    /// Converts into an atomically reference-counted shared vector without
    /// copying the elements.
    #[inline]
    pub fn into_shared_atomic(self) -> AtomicSharedVector<T, A>
    where
        A: Allocator + Clone,
    {
        if self.raw.header.cap == 0 {
            return AtomicSharedVector::try_with_capacity_in(0, self.allocator.clone()).unwrap();
        }
        unsafe {
            let inner = self.into_header_buffer::<AtomicRefCount>();
            AtomicSharedVector { inner }
        }
    }
1177
    /// Appends an element, growing the buffer if needed.
    /// Panics on allocation failure.
    #[inline(always)]
    pub fn push(&mut self, val: T) {
        unsafe {
            self.raw.push(&self.allocator, val);
        }
    }

    /// Appends an element only if it fits in the current capacity; otherwise
    /// hands it back as `Err`.
    #[inline(always)]
    pub fn push_within_capacity(&mut self, val: T) -> Result<(), T> {
        self.raw.push_within_capacity(val)
    }

    /// Removes and returns the last element, or `None` if empty.
    #[inline(always)]
    pub fn pop(&mut self) -> Option<T> {
        self.raw.pop()
    }

    /// Removes and returns the element at `index`, shifting the tail left.
    /// O(n). Panics if `index >= len`.
    #[inline(always)]
    pub fn remove(&mut self, index: usize) -> T {
        self.raw.remove(index)
    }

    /// Removes and returns the element at `idx` in O(1), replacing it with
    /// the last element. Does not preserve order. Panics if `idx >= len`.
    #[inline(always)]
    pub fn swap_remove(&mut self, idx: usize) -> T {
        self.raw.swap_remove(idx)
    }

    /// Inserts `element` at `index`, shifting the tail right. O(n).
    /// Panics if `index > len` or on allocation failure.
    #[inline(always)]
    pub fn insert(&mut self, index: usize, element: T) {
        unsafe { self.raw.insert(&self.allocator, index, element) }
    }

    /// Clones the contents of `data` onto the end.
    /// Panics on allocation failure.
    #[inline(always)]
    pub fn extend_from_slice(&mut self, data: &[T])
    where
        T: Clone,
    {
        unsafe {
            self.raw.extend_from_slice(&self.allocator, data)
        }
    }

    /// Moves all elements of `other` onto the end, leaving `other` empty.
    #[inline(always)]
    pub fn append(&mut self, other: &mut Self)
    where
        T: Clone,
    {
        unsafe {
            self.raw.append(&self.allocator, &mut other.raw)
        }
    }

    /// Appends every item yielded by `data`.
    /// Panics on allocation failure.
    #[inline(always)]
    pub fn extend(&mut self, data: impl IntoIterator<Item = T>) {
        unsafe {
            self.raw.extend(&self.allocator, data)
        }
    }
1270
    /// Returns a deep copy with its own buffer, using a clone of the allocator.
    #[inline(always)]
    pub fn clone_buffer(&self) -> Self
    where
        T: Clone,
        A: Clone,
    {
        Vector {
            raw: self.raw.clone_buffer(&self.allocator),
            allocator: self.allocator.clone(),
        }
    }

    /// Returns a deep copy with capacity at least `cap`.
    #[inline(always)]
    pub fn clone_buffer_with_capacity(&self, cap: usize) -> Self
    where
        T: Clone,
        A: Clone,
    {
        Vector {
            raw: self.raw.clone_buffer_with_capacity(&self.allocator, cap),
            allocator: self.allocator.clone(),
        }
    }

    /// Reserves room for at least `additional` more elements.
    /// Panics on allocation failure.
    #[inline(always)]
    pub fn reserve(&mut self, additional: usize) {
        unsafe {
            self.raw.try_reserve(&self.allocator, additional).unwrap()
        }
    }

    /// Fallible version of `reserve`.
    #[inline(always)]
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), AllocError> {
        unsafe {
            self.raw.try_reserve(&self.allocator, additional)
        }
    }
1312
1313 pub fn reserve_exact(&mut self, additional: usize)
1324 where
1325 T: Clone,
1326 {
1327 self.try_reserve_exact(additional).unwrap();
1328 }
1329
    /// Fallible version of `reserve_exact`.
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), AllocError> {
        unsafe {
            self.raw.try_reserve_exact(&self.allocator, additional)
        }
    }
1344
1345 #[inline(always)]
1350 pub fn shrink_to(&mut self, min_capacity: usize)
1351 where
1352 T: Clone,
1353 {
1354 unsafe {
1355 self.raw.shrink_to(&self.allocator, min_capacity)
1356 }
1357 }
1358
1359 #[inline(always)]
1361 pub fn shrink_to_fit(&mut self)
1362 where
1363 T: Clone,
1364 {
1365 unsafe {
1366 self.raw.shrink_to_fit(&self.allocator)
1367 }
1368 }
1369
    /// Removes the given range and returns an iterator over the removed
    /// elements; the tail is shifted down when the iterator is dropped.
    /// Panics if the range is out of bounds.
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
    where
        R: RangeBounds<usize>,
    {
        self.raw.drain(range)
    }

    /// Replaces the given range with the items of `replace_with`, yielding
    /// the removed elements.
    pub fn splice<R, I>(
        &mut self,
        range: R,
        replace_with: I
    ) -> Splice<'_, <I as IntoIterator>::IntoIter, A>
    where
        R: RangeBounds<usize>,
        I: IntoIterator<Item = T>,
    {
        unsafe { self.raw.splice(&self.allocator, range, replace_with) }
    }

    /// Keeps only the elements for which `f` returns `true`, preserving order.
    pub fn retain<F>(&mut self, f: F)
    where
        F: FnMut(&T) -> bool,
    {
        self.raw.retain(f)
    }

    /// Like `retain`, but passes the elements by mutable reference.
    pub fn retain_mut<F>(&mut self, f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        self.raw.retain_mut(f)
    }
1454
1455 #[inline(always)]
1456 pub fn take(&mut self) -> Self
1457 where
1458 A: Clone,
1459 {
1460 let other = Vector {
1461 raw: self.raw.take(),
1462 allocator: self.allocator.clone(),
1463 };
1464
1465 other
1466 }
1467}
1468
// Dropping the vector drops all elements and frees the buffer through the
// stored allocator.
impl<T, A: Allocator> Drop for Vector<T, A> {
    fn drop(&mut self) {
        unsafe {
            self.raw.deallocate(&self.allocator)
        }
    }
}

// Cloning deep-copies the elements into a freshly allocated buffer.
impl<T: Clone, A: Allocator + Clone> Clone for Vector<T, A> {
    fn clone(&self) -> Self {
        self.clone_buffer()
    }
}

// Element-wise equality with another vector.
impl<T: PartialEq<T>, A: Allocator> PartialEq<Vector<T, A>> for Vector<T, A> {
    fn eq(&self, other: &Self) -> bool {
        self.as_slice() == other.as_slice()
    }
}

// Element-wise equality with a slice.
impl<T: PartialEq<T>, A: Allocator> PartialEq<&[T]> for Vector<T, A> {
    fn eq(&self, other: &&[T]) -> bool {
        self.as_slice() == *other
    }
}

impl<T, A: Allocator> AsRef<[T]> for Vector<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, A: Allocator> AsMut<[T]> for Vector<T, A> {
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

// The default vector is empty and uses the global allocator.
impl<T> Default for Vector<T, Global> {
    fn default() -> Self {
        Self::new()
    }
}

impl<'a, T, A: Allocator> IntoIterator for &'a Vector<T, A> {
    type Item = &'a T;
    type IntoIter = core::slice::Iter<'a, T>;
    fn into_iter(self) -> core::slice::Iter<'a, T> {
        self.as_slice().iter()
    }
}

impl<'a, T, A: Allocator> IntoIterator for &'a mut Vector<T, A> {
    type Item = &'a mut T;
    type IntoIter = core::slice::IterMut<'a, T>;
    fn into_iter(self) -> core::slice::IterMut<'a, T> {
        self.as_mut_slice().iter_mut()
    }
}

// Indexing delegates to the slice, supporting all standard index types.
impl<T, A: Allocator, I> Index<I> for Vector<T, A>
where
    I: core::slice::SliceIndex<[T]>,
{
    type Output = <I as core::slice::SliceIndex<[T]>>::Output;
    fn index(&self, index: I) -> &Self::Output {
        self.as_slice().index(index)
    }
}

impl<T, A: Allocator, I> IndexMut<I> for Vector<T, A>
where
    I: core::slice::SliceIndex<[T]>,
{
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        self.as_mut_slice().index_mut(index)
    }
}

// Deref to a slice exposes all non-mutating slice methods.
impl<T, A: Allocator> Deref for Vector<T, A> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T, A: Allocator> DerefMut for Vector<T, A> {
    fn deref_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

// Debug-formats like a slice.
impl<T: Debug, A: Allocator> Debug for Vector<T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> Result<(), core::fmt::Error> {
        self.as_slice().fmt(f)
    }
}

// Converts a shared vector into a uniquely owned one via `into_unique`.
impl<T: Clone, A: Allocator + Clone> From<SharedVector<T, A>> for Vector<T, A> {
    fn from(shared: SharedVector<T, A>) -> Self {
        shared.into_unique()
    }
}

impl<T: Clone, A: Allocator + Clone> From<AtomicSharedVector<T, A>> for Vector<T, A> {
    fn from(shared: AtomicSharedVector<T, A>) -> Self {
        shared.into_unique()
    }
}

// Hashes like a slice.
impl<T: core::hash::Hash, A: Allocator> core::hash::Hash for Vector<T, A> {
    fn hash<H>(&self, state: &mut H) where H: core::hash::Hasher {
        self.as_slice().hash(state)
    }
}
1584
// Exercises two vectors growing interleaved inside the same bump allocator:
// growth of one must not corrupt the other's buffer.
#[test]
fn bump_alloc() {
    use blink_alloc::BlinkAlloc;

    let allocator = BlinkAlloc::new();

    {
        let mut v1: Vector<u32, &BlinkAlloc> = Vector::try_with_capacity_in(4, &allocator).unwrap();
        v1.push(0);
        v1.push(1);
        v1.push(2);
        assert_eq!(v1.capacity(), 4);
        assert_eq!(v1.as_slice(), &[0, 1, 2]);

        let mut v2 = crate::vector!([10, 11] in &allocator);
        assert_eq!(v2.capacity(), 2);

        assert_eq!(v2.as_slice(), &[10, 11]);

        v1.push(3);
        v1.push(4);

        assert_eq!(v1.as_slice(), &[0, 1, 2, 3, 4]);

        // Pushing past the initial capacity must have grown v1's buffer.
        assert!(v1.capacity() > 4);

        v2.push(12);
        v2.push(13);
        v2.push(14);

        let v2 = v2.into_shared();

        // Both vectors keep their contents intact after interleaved growth.
        assert_eq!(v1.as_slice(), &[0, 1, 2, 3, 4]);
        assert_eq!(v2.as_slice(), &[10, 11, 12, 13, 14]);
    }
}
1622
// End-to-end exercise of unique vectors with heap-allocated elements:
// push, conversion to shared, ref-count tracking, buffer cloning and
// extend_from_slice (including the empty-slice case).
#[test]
fn basic_unique() {
    fn num(val: u32) -> Box<u32> {
        Box::new(val)
    }

    let mut a = Vector::with_capacity(256);

    a.push(num(0));
    a.push(num(1));
    a.push(num(2));

    let a = a.into_shared();

    assert_eq!(a.len(), 3);

    assert_eq!(a.as_slice(), &[num(0), num(1), num(2)]);

    // A freshly converted shared vector has a single reference.
    assert!(a.is_unique());

    let b = Vector::from_slice(&[num(0), num(1), num(2), num(3), num(4)]);

    assert_eq!(b.as_slice(), &[num(0), num(1), num(2), num(3), num(4)]);

    // clone_buffer must produce a distinct allocation.
    let c = a.clone_buffer();
    assert!(!c.ptr_eq(&a));

    // new_ref shares the buffer and bumps the count; dropping it restores uniqueness.
    let a2 = a.new_ref();
    assert!(a2.ptr_eq(&a));
    assert!(!a.is_unique());
    assert!(!a2.is_unique());

    mem::drop(a2);

    assert!(a.is_unique());

    let _ = c.clone_buffer();
    let _ = b.clone_buffer();

    let mut d = Vector::with_capacity(64);
    d.extend_from_slice(&[num(0), num(1), num(2)]);
    d.extend_from_slice(&[]);
    d.extend_from_slice(&[num(3), num(4)]);

    assert_eq!(d.as_slice(), &[num(0), num(1), num(2), num(3), num(4)]);
}
1669
// Shrinking an empty vector below its capacity must not crash.
#[test]
fn shrink() {
    let mut v: Vector<u32> = Vector::with_capacity(32);
    v.shrink_to(8);
}

// Zero-sized element types must still track their length correctly.
#[test]
fn zst() {
    let mut v = Vector::new();
    v.push(());
    v.push(());
    v.push(());
    v.push(());

    assert_eq!(v.len(), 4);
}

// A `&dyn Allocator` trait object works as the allocator parameter.
#[test]
fn dyn_allocator() {
    let allocator: &dyn Allocator = &Global;
    let mut v = crate::vector!([1u32, 2, 3] in allocator);

    v.push(4);

    assert_eq!(&v[..], &[1, 2, 3, 4]);
}
1696
// A data structure can hold a `Vector` parameterized over a borrowed
// `&dyn Allocator`, both 'static and non-'static.
// Fix: renamed from the typo `borrowd_dyn_alloc`; test functions have no
// callers, so the rename is safe.
#[test]
fn borrowed_dyn_alloc() {
    struct DataStructure<'a> {
        data: Vector<u32, &'a dyn Allocator>,
    }

    impl DataStructure<'static> {
        fn new() -> DataStructure<'static> {
            DataStructure {
                data: Vector::new_in(&Global as &'static dyn Allocator)
            }
        }
    }

    impl<'a> DataStructure<'a> {
        fn new_in(allocator: &'a dyn Allocator) -> DataStructure<'a> {
            DataStructure { data: Vector::new_in(allocator) }
        }
    }

    impl<'a> DataStructure<'a> {
        fn push(&mut self, val: u32) {
            self.data.push(val);
        }
    }

    let mut ds1 = DataStructure::new();
    ds1.push(1);

    let alloc = Global;
    let mut ds2 = DataStructure::new_in(&alloc);
    ds2.push(2);
}
1729
// Splicing into an empty vector twice must not leak or double-free the
// boxed elements (regression test).
#[test]
fn splice1() {
    let mut vec = Vector::new();
    vec.splice(0..0, vec![Box::new(1); 5].into_iter());
    vec.splice(0..0, vec![Box::new(2); 5].into_iter());
}

// Fuzz-derived drain regression test; the huge constants are reduced modulo
// / clamped against the vector's length before being used as a range.
#[test]
fn drain1() {
    let mut vectors: [Vector<Box<u32>>; 4] = [
        Vector::new(),
        Vector::new(),
        Vector::new(),
        Vector::new(),
    ];
    vectors[0].shrink_to(3906369431118283232);
    vectors[2].extend_from_slice(&[Box::new(1), Box::new(2), Box::new(3)]);
    let vec = &mut vectors[2];
    let len = vec.len();
    let start = if len > 0 { 16059518370053021184 % len } else { 0 };
    let end = 16059518370053021185.min(len);
    vectors[2].drain(start..end);
}
1753
// Exercises `try_reallocate_in`: the `Rc` strong counts verify that elements
// are moved (not cloned) and that truncated elements are actually dropped.
#[test]
fn reallocate_in() {
    pub use crate::alloc::Global;
    use std::rc::Rc;

    let alloc1 = Global;
    let alloc2 = Global;

    let mut vec = Vector::new_in(alloc1);
    let val = Rc::new(());

    for _ in 0..8 {
        vec.push(val.clone());
    }

    // 8 clones in the vector + the original handle.
    assert_eq!(Rc::strong_count(&val), 9);

    // Reallocating down to 4 must drop the 4 truncated clones.
    let mut vec = vec.try_reallocate_in(alloc2, 4).unwrap();

    assert_eq!(vec.len(), 4);
    assert_eq!(vec.capacity(), 4);
    assert_eq!(Rc::strong_count(&val), 5);

    for _ in 0..12 {
        vec.push(val.clone());
    }

    assert_eq!(vec.len(), 16);
    assert!(vec.capacity() >= 16);
    assert_eq!(Rc::strong_count(&val), 17);

    // Reallocating to capacity 0 drops everything.
    let vec = vec.try_reallocate_in(alloc1, 0).unwrap();

    assert_eq!(vec.len(), 0);
    assert_eq!(vec.capacity(), 0);
    assert_eq!(Rc::strong_count(&val), 1);
}
1791
// `extend_within_capacity` must stop at the capacity limit without dropping
// the items it consumed; the remaining iterator items are dropped when the
// iterator itself is, and `clear` drops the stored ones.
#[test]
fn extend_within_capacity() {
    use std::sync::atomic::{AtomicUsize, Ordering};
    static COUNTER: AtomicUsize = AtomicUsize::new(0);

    struct Foo;
    impl Drop for Foo {
        fn drop(&mut self) {
            COUNTER.fetch_add(1, Ordering::SeqCst);
        }
    }

    let values = [
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
        Foo,
    ];

    let n = values.len();
    let cap;

    let mut vector = RawVector::new();
    unsafe {
        vector.try_reserve(&Global, 4).unwrap();
        cap = vector.capacity();
        // The reserved capacity must be smaller than the input for the test
        // to exercise the early-stop path.
        assert!(cap < n);

        let mut iter = values.into_iter();

        vector.extend_within_capacity(&mut iter);

        // Nothing has been dropped yet: `cap` items are stored, the rest are
        // still owned by the iterator.
        assert_eq!(COUNTER.load(Ordering::SeqCst), 0);
    }

    // Dropping the iterator drops the unconsumed items.
    assert_eq!(COUNTER.load(Ordering::SeqCst), n - cap);

    vector.clear();

    // Clearing drops the stored items.
    assert_eq!(COUNTER.load(Ordering::SeqCst), n);

    unsafe {
        vector.deallocate(&Global);
    }
}