1mod slice;
28mod sort;
29
30pub use slice::{
31 Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32 Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::mem::MaybeUninit;
39use std::ops::{Index, IndexMut};
40use std::ptr::NonNull;
41
/// A growable vector that stores elements in a series of geometrically
/// growing segments. Segments are never reallocated, so pushing new elements
/// never moves elements that are already stored.
///
/// When `PREALLOC > 0`, the first `PREALLOC` elements live inline in the
/// struct itself; later elements live in separately allocated "shelves"
/// whose sizes double shelf by shelf.
pub struct SegmentedVec<T, const PREALLOC: usize = 0> {
    // Inline storage for the first PREALLOC elements; only the first
    // `len.min(PREALLOC)` slots are initialized.
    prealloc_segment: MaybeUninit<[T; PREALLOC]>,
    // One pointer per dynamically allocated shelf; shelf `i` holds
    // `Self::shelf_size(i)` elements.
    dynamic_segments: Vec<NonNull<T>>,
    // Number of initialized elements across all segments.
    len: usize,
    // Write cursor: whenever `segment_remaining > 0`, points at slot `len`
    // (the slot the next `push` will fill).
    write_ptr: *mut T,
    // Free slots left in the segment `write_ptr` points into.
    segment_remaining: usize,
    // Expresses logical ownership of `T` (drop check / variance).
    _marker: PhantomData<T>,
}

// SAFETY: the raw pointers refer only to storage owned by this value, so
// sending/sharing the vector is sound exactly when `T` itself is Send/Sync.
unsafe impl<T: Send, const PREALLOC: usize> Send for SegmentedVec<T, PREALLOC> {}
unsafe impl<T: Sync, const PREALLOC: usize> Sync for SegmentedVec<T, PREALLOC> {}
61
62impl<T, const PREALLOC: usize> SegmentedVec<T, PREALLOC> {
    // log2(PREALLOC). Doubles as a compile-time guard: PREALLOC must be 0 or
    // a power of two so segment arithmetic can use shifts and masks.
    const PREALLOC_EXP: u32 = if PREALLOC == 0 {
        0
    } else {
        assert!(
            PREALLOC.is_power_of_two(),
            "PREALLOC must be 0 or a power of 2"
        );
        PREALLOC.trailing_zeros()
    };

    // Size of the first dynamic shelf when PREALLOC == 0, tiered by element
    // size (bigger elements get a smaller first shelf).
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    // log2(MIN_NON_ZERO_CAP).
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();

    // Added to a logical index before taking log2 so that segment boundaries
    // land exactly on powers of two (the "biased" index).
    const BIAS: usize = if PREALLOC > 0 {
        PREALLOC
    } else {
        Self::MIN_NON_ZERO_CAP
    };

    // Subtracted from ilog2(biased index) to turn the MSB position into a
    // zero-based dynamic-shelf number.
    const SHELF_OFFSET: u32 = if PREALLOC == 0 {
        Self::MIN_CAP_EXP
    } else {
        Self::PREALLOC_EXP + 1
    };
103
104 #[inline]
114 pub const fn new() -> Self {
115 Self {
116 prealloc_segment: MaybeUninit::uninit(),
117 dynamic_segments: Vec::new(),
118 len: 0,
119 write_ptr: std::ptr::null_mut(),
120 segment_remaining: 0,
121 _marker: PhantomData,
122 }
123 }
124
    /// Re-derives `write_ptr`/`segment_remaining` from `len` so the cursor
    /// points at slot `len` (the slot the next `push` will fill).
    ///
    /// Callers must ensure slot `len` actually exists in currently allocated
    /// storage (i.e. `len < capacity()`, or `len < PREALLOC`).
    #[inline]
    fn init_write_ptr(&mut self) {
        if PREALLOC > 0 && self.len < PREALLOC {
            // Next write lands in the inline segment.
            self.write_ptr = unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len)
            };
            self.segment_remaining = PREALLOC - self.len;
        } else if !self.dynamic_segments.is_empty() {
            // Next write lands in a dynamic shelf.
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            self.write_ptr = unsafe { self.dynamic_segments[shelf].as_ptr().add(box_idx) };
            self.segment_remaining = remaining;
        } else if PREALLOC > 0 {
            // NOTE(review): this arm points the cursor at the inline start
            // with the full PREALLOC budget, which is only correct when
            // len == 0; for len == PREALLOC it appears unreachable from the
            // current callers — confirm before relying on it.
            self.write_ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_remaining = PREALLOC;
        } else {
            // No storage at all: null cursor, zero budget.
            self.write_ptr = std::ptr::null_mut();
            self.segment_remaining = 0;
        }
    }

    /// Number of elements currently stored.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` when the vector holds no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Total number of elements storable without further allocation
    /// (inline prealloc slots plus all allocated shelves).
    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.dynamic_segments.len() as u32)
    }
165
    /// Appends `value` to the back.
    ///
    /// Fast path: the current segment still has room, so this is a single
    /// write plus cursor bump; otherwise defers to the cold slow path.
    #[inline]
    pub fn push(&mut self, value: T) {
        if self.segment_remaining > 0 {
            // SAFETY: when `segment_remaining > 0`, `write_ptr` points at the
            // uninitialized slot at index `len`.
            unsafe {
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.segment_remaining -= 1;
            self.len += 1;
            return;
        }

        self.push_slow(value);
    }

    /// Slow push path: the current segment is full (or the cursor was never
    /// initialized), so start the next segment, allocating it if needed.
    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        let new_len = self.len + 1;

        if PREALLOC > 0 && self.len == 0 {
            // First ever push on a preallocated vector: use the inline segment.
            unsafe {
                let ptr = (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr();
                std::ptr::write(ptr, value);
                self.write_ptr = ptr.add(1);
            }
            self.segment_remaining = PREALLOC - 1;
            self.len = 1;
            return;
        }

        // At a segment boundary `len + BIAS` is a power of two, so
        // trailing_zeros() equals ilog2() and identifies the new shelf.
        let biased = self.len + Self::BIAS;
        let shelf = (biased.trailing_zeros() - Self::SHELF_OFFSET) as usize;
        let shelf_size = Self::shelf_size(shelf as u32);

        self.grow_capacity(new_len);

        // SAFETY: grow_capacity just ensured shelf `shelf` exists; the new
        // element goes into its first slot.
        unsafe {
            let ptr = self.dynamic_segments.get_unchecked(shelf).as_ptr();
            std::ptr::write(ptr, value);
            self.write_ptr = ptr.add(1);
        }
        self.segment_remaining = shelf_size - 1;
        self.len = new_len;
    }

    /// Removes and returns the last element, or `None` if empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }
        self.len -= 1;
        if self.segment_remaining < Self::current_segment_size(self.len) {
            // Cursor is inside the segment holding the popped element:
            // step back one slot and move the value out.
            unsafe {
                self.write_ptr = self.write_ptr.sub(1);
            }
            self.segment_remaining += 1;
            Some(unsafe { std::ptr::read(self.write_ptr) })
        } else {
            // Cursor sat at the start of the *next* segment; re-derive it for
            // the previous segment, then read the popped slot.
            self.init_write_ptr();
            Some(unsafe { self.unchecked_read(self.len) })
        }
    }

    /// Size of the segment that contains logical index `index`.
    #[inline]
    fn current_segment_size(index: usize) -> usize {
        1 << (index + Self::BIAS).ilog2()
    }
271
272 #[inline]
276 pub fn get(&self, index: usize) -> Option<&T> {
277 if index < self.len {
278 Some(unsafe { self.unchecked_at(index) })
279 } else {
280 None
281 }
282 }
283
284 #[inline]
288 pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
289 if index < self.len {
290 Some(unsafe { self.unchecked_at_mut(index) })
291 } else {
292 None
293 }
294 }
295
296 #[inline]
298 pub fn first(&self) -> Option<&T> {
299 self.get(0)
300 }
301
302 #[inline]
304 pub fn first_mut(&mut self) -> Option<&mut T> {
305 self.get_mut(0)
306 }
307
308 #[inline]
310 pub fn last(&self) -> Option<&T> {
311 if self.len == 0 {
312 None
313 } else {
314 self.get(self.len - 1)
315 }
316 }
317
318 #[inline]
320 pub fn last_mut(&mut self) -> Option<&mut T> {
321 if self.len == 0 {
322 None
323 } else {
324 self.get_mut(self.len - 1)
325 }
326 }
327
    /// Returns `true` if `x` equals any stored element.
    #[inline]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(x)
    }

    /// Returns `true` if the vector begins with `needle`.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    /// Returns `true` if the vector ends with `needle`.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }

    /// Binary-searches a sorted vector for `x`; `Err` carries the insertion
    /// index.
    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
    where
        T: Ord,
    {
        self.as_slice().binary_search(x)
    }

    /// Binary search driven by a comparator.
    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> Ordering,
    {
        self.as_slice().binary_search_by(f)
    }

    /// Binary search on keys extracted by `f`.
    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> B,
        B: Ord,
    {
        self.as_slice().binary_search_by_key(b, f)
    }

    /// Swaps the elements at indices `a` and `b`.
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    /// Reverses element order in place.
    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    /// Overwrites every element with a clone of `value`.
    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    /// Overwrites every element with the result of calling `f`.
    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }
411
    /// Drops all elements, keeping allocated capacity.
    pub fn clear(&mut self) {
        if self.len == 0 {
            return;
        }

        if std::mem::needs_drop::<T>() {
            if PREALLOC > 0 {
                // Drop the initialized prefix of the inline segment.
                let count = self.len.min(PREALLOC);
                let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
                unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
            }

            if self.len > PREALLOC {
                // Drop the initialized prefix of each dynamic shelf.
                let mut remaining = self.len - PREALLOC;
                for shelf in 0..self.dynamic_segments.len() {
                    let size = Self::shelf_size(shelf as u32);
                    let count = remaining.min(size);
                    let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                    unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
                    remaining -= count;
                    if remaining == 0 {
                        break;
                    }
                }
            }
        }

        self.len = 0;
        // Reset the write cursor to the very first slot.
        if PREALLOC > 0 {
            self.write_ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_remaining = PREALLOC;
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_remaining = 0;
        }
    }

    /// Shortens the vector to `new_len`, dropping the tail.
    /// No-op when `new_len >= len`.
    pub fn truncate(&mut self, new_len: usize) {
        if new_len >= self.len {
            return;
        }

        if std::mem::needs_drop::<T>() {
            if PREALLOC > 0 && new_len < PREALLOC {
                // Drop the discarded tail that lives in the inline segment.
                let start = new_len;
                let end = self.len.min(PREALLOC);
                if start < end {
                    let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr().add(start) };
                    unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, end - start)) };
                }
            }

            if self.len > PREALLOC {
                // Drop the discarded tail in the dynamic shelves, intersecting
                // each shelf's index range with [dynamic_new_len, dynamic_old_len).
                let dynamic_new_len = new_len.saturating_sub(PREALLOC);
                let dynamic_old_len = self.len - PREALLOC;

                if dynamic_new_len < dynamic_old_len {
                    let mut pos = 0usize;
                    for shelf in 0..self.dynamic_segments.len() {
                        let size = Self::shelf_size(shelf as u32);
                        let seg_end = pos + size;

                        let drop_start = dynamic_new_len.max(pos);
                        let drop_end = dynamic_old_len.min(seg_end);

                        if drop_start < drop_end {
                            let offset = drop_start - pos;
                            let count = drop_end - drop_start;
                            let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr().add(offset) };
                            unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
                        }

                        pos = seg_end;
                        if pos >= dynamic_old_len {
                            break;
                        }
                    }
                }
            }
        }

        self.len = new_len;
        // Re-derive the write cursor for the new length.
        if new_len > 0 {
            self.init_write_ptr();
        } else if PREALLOC > 0 {
            self.write_ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_remaining = PREALLOC;
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_remaining = 0;
        }
    }
517
    /// Ensures capacity for at least `additional` more elements.
    pub fn reserve(&mut self, additional: usize) {
        let old_capacity = self.capacity();
        self.grow_capacity(self.len + additional);
        // Going from zero capacity to non-zero means the write cursor was
        // still null; point it at the first freshly allocated slot.
        if old_capacity == 0 && self.capacity() > 0 && self.segment_remaining == 0 {
            self.init_write_ptr();
        }
    }
527
528 pub fn shrink_to_fit(&mut self) {
532 self.shrink_capacity(self.len);
533 }
534
    /// Returns a borrowing iterator; segment pointers are resolved lazily on
    /// the first `next()`.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T, PREALLOC> {
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Returns a mutably borrowing iterator; segment pointers are resolved
    /// lazily on the first `next()`.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T, PREALLOC> {
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Views the whole vector as a [`SegmentedSlice`].
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T, PREALLOC> {
        SegmentedSlice::new(self)
    }

    /// Views the whole vector as a mutable [`SegmentedSliceMut`].
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T, PREALLOC> {
        SegmentedSliceMut::new(self)
    }

    /// Borrows the sub-range `range` as a [`SegmentedSlice`].
    ///
    /// # Panics
    /// Panics if the range is inverted or extends past `len`.
    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    /// Mutable counterpart of [`Self::slice`].
    ///
    /// # Panics
    /// Panics if the range is inverted or extends past `len`.
    #[inline]
    pub fn slice_mut(
        &mut self,
        range: std::ops::Range<usize>,
    ) -> SegmentedSliceMut<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }
649
650 pub fn extend_from_slice(&mut self, other: &[T])
652 where
653 T: Clone,
654 {
655 if other.is_empty() {
656 return;
657 }
658 self.reserve(other.len());
659
660 let mut src = other;
661
662 if PREALLOC > 0 && self.len < PREALLOC {
664 let prealloc_remaining = PREALLOC - self.len;
665 let to_copy = src.len().min(prealloc_remaining);
666 let dest = unsafe {
667 (*self.prealloc_segment.as_mut_ptr())
668 .as_mut_ptr()
669 .add(self.len)
670 };
671 for i in 0..to_copy {
672 unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
673 }
674 self.len += to_copy;
675 self.write_ptr = unsafe { dest.add(to_copy) };
676 self.segment_remaining = prealloc_remaining - to_copy;
677 src = &src[to_copy..];
678 }
679
680 while !src.is_empty() {
682 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
683 let to_copy = src.len().min(remaining);
684 let dest = unsafe {
685 self.dynamic_segments
686 .get_unchecked(shelf)
687 .as_ptr()
688 .add(box_idx)
689 };
690 for i in 0..to_copy {
691 unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
692 }
693 self.len += to_copy;
694 self.segment_remaining -= to_copy;
695 src = &src[to_copy..];
696 }
697 }
698
699 pub fn extend_from_copy_slice(&mut self, other: &[T])
704 where
705 T: Copy,
706 {
707 if other.is_empty() {
708 return;
709 }
710 self.reserve(other.len());
711
712 let mut src = other;
713
714 if PREALLOC > 0 && self.len < PREALLOC {
716 let prealloc_remaining = PREALLOC - self.len;
717 let to_copy = src.len().min(prealloc_remaining);
718 unsafe {
719 let dest = (*self.prealloc_segment.as_mut_ptr())
720 .as_mut_ptr()
721 .add(self.len);
722 std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
723 self.write_ptr = dest.add(to_copy);
724 };
725 self.len += to_copy;
726 self.segment_remaining = prealloc_remaining - to_copy;
727 src = &src[to_copy..];
728 }
729
730 while !src.is_empty() {
732 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
733 let to_copy = src.len().min(remaining);
734 self.segment_remaining = remaining;
735 unsafe {
736 let dest = self
737 .dynamic_segments
738 .get_unchecked(shelf)
739 .as_ptr()
740 .add(box_idx);
741 std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
742 self.write_ptr = self.write_ptr.add(to_copy);
743 };
744
745 self.len += to_copy;
746 self.segment_remaining -= to_copy;
747 src = &src[to_copy..];
748 }
749 }
750
    /// Sorts the vector (delegates to the segmented-slice `sort`).
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    /// Sorts with a comparator (delegates to the segmented-slice `sort_by`).
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    /// Sorts by a key extracted from each element.
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }

    /// Sorts via the segmented-slice `sort_unstable`.
    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    /// Unstable-sort variant taking a comparator.
    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    /// Unstable-sort variant taking a key extractor.
    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }

    /// Returns `true` if the elements are in sorted order.
    pub fn is_sorted(&self) -> bool
    where
        T: PartialOrd,
    {
        self.as_slice().is_sorted()
    }

    /// Like [`Self::is_sorted`], with a custom "in order" predicate.
    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: FnMut(&T, &T) -> bool,
    {
        self.as_slice().is_sorted_by(compare)
    }

    /// Like [`Self::is_sorted`], comparing keys extracted by `f`.
    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        self.as_slice().is_sorted_by_key(f)
    }

    /// Partition point of a partitioned sequence (delegates to the
    /// segmented-slice implementation).
    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        self.as_slice().partition_point(pred)
    }

    /// Rotates the elements left by `mid` positions.
    pub fn rotate_left(&mut self, mid: usize) {
        self.as_mut_slice().rotate_left(mid);
    }

    /// Rotates the elements right by `k` positions.
    pub fn rotate_right(&mut self, k: usize) {
        self.as_mut_slice().rotate_right(k);
    }

    /// Creates an empty vector with room for at least `capacity` elements.
    pub fn with_capacity(capacity: usize) -> Self {
        let mut vec = Self::new();
        vec.reserve(capacity);
        vec
    }
946
    /// Inserts `element` at `index`, shifting later elements right.
    ///
    /// O(len - index): implemented as a push followed by adjacent swaps,
    /// because segments are not contiguous.
    ///
    /// # Panics
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len);
        self.push(element);
        if index < self.len - 1 {
            // Bubble the new element backwards into position.
            for i in (index..self.len - 1).rev() {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
            }
        }
    }

    /// Removes and returns the element at `index`, shifting later elements
    /// left. O(len - index).
    ///
    /// # Panics
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        // Bubble the doomed element to the back, then pop it (pop keeps the
        // write cursor consistent).
        for i in index..self.len - 1 {
            unsafe {
                let ptr_a = self.unchecked_at_mut(i) as *mut T;
                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }

    /// Removes and returns the element at `index` in O(1) by swapping the
    /// last element into its place (order is not preserved).
    ///
    /// # Panics
    /// Panics if `index >= len`.
    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        let last_index = self.len - 1;
        if index != last_index {
            unsafe {
                let ptr_a = self.unchecked_at_mut(index) as *mut T;
                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }
1005
    /// Keeps only the elements for which `f` returns `true`.
    ///
    /// NOTE(review): each rejection goes through `remove`, so this is O(n^2)
    /// in the worst case (unlike `Vec::retain`).
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Like [`Self::retain`], but the predicate gets mutable access.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at_mut(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Removes consecutive duplicate elements.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }
1047
1048 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1050 where
1051 F: FnMut(&mut T, &mut T) -> bool,
1052 {
1053 if self.len <= 1 {
1054 return;
1055 }
1056 let mut write = 1;
1057 for read in 1..self.len {
1058 let should_keep = unsafe {
1059 let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1060 let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1061 !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1062 };
1063 if should_keep {
1064 if read != write {
1065 unsafe {
1066 let ptr_src = self.unchecked_at_mut(read) as *mut T;
1067 let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1068 std::ptr::swap(ptr_dst, ptr_src);
1069 }
1070 }
1071 write += 1;
1072 } else {
1073 unsafe {
1075 std::ptr::drop_in_place(self.unchecked_at_mut(read));
1076 }
1077 }
1078 }
1079 self.len = write;
1080 }
1081
    /// Removes consecutive elements whose extracted keys compare equal.
    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }

    /// Resizes to `new_len`, cloning `value` into new slots or truncating.
    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Resizes to `new_len`, filling new slots with values produced by `f`.
    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Moves all elements of `other` to the back of `self`, leaving `other`
    /// empty.
    ///
    /// Elements are popped from `other` (back to front) and pushed, then the
    /// appended region is reversed in place to restore their order.
    pub fn append(&mut self, other: &mut Self) {
        let other_len = other.len;
        self.reserve(other_len);
        let start = self.len;
        while let Some(item) = other.pop() {
            self.push(item);
        }
        // Reverse [start, len) to undo the back-to-front transfer order.
        let mut left = start;
        let mut right = self.len;
        while left < right {
            right -= 1;
            if left < right {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
                left += 1;
            }
        }
    }
1151
1152 pub fn split_off(&mut self, at: usize) -> Self {
1161 assert!(at <= self.len);
1162 let mut other = Self::new();
1163 other.reserve(self.len - at);
1164 for i in at..self.len {
1165 other.push(unsafe { self.unchecked_read(i) });
1166 }
1167 self.len = at;
1168 other
1169 }
1170
    /// Iterator over chunks of `chunk_size` elements (see
    /// `SegmentedSlice::chunks` for exact semantics).
    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T, PREALLOC> {
        self.as_slice().chunks(chunk_size)
    }

    /// Iterator over overlapping windows of `size` elements.
    pub fn windows(&self, size: usize) -> Windows<'_, T, PREALLOC> {
        self.as_slice().windows(size)
    }

    /// Chunk iterator starting from the back.
    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T, PREALLOC> {
        self.as_slice().rchunks(chunk_size)
    }

    /// Iterator over exact-size chunks (see `SegmentedSlice::chunks_exact`).
    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T, PREALLOC> {
        self.as_slice().chunks_exact(chunk_size)
    }

    /// Draining iterator over `range`. The removal semantics live in the
    /// `Drain` type (defined elsewhere in this module).
    ///
    /// # Panics
    /// Panics if the range is inverted or extends past `len`.
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            index: range.start,
        }
    }

    /// Clones all elements into a contiguous standard `Vec`.
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.as_slice().to_vec()
    }
1229
    /// Number of dynamic shelves required so that total capacity (inline
    /// prealloc slots plus shelves) reaches at least `box_count` elements.
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        if PREALLOC == 0 {
            let val = box_count + Self::MIN_NON_ZERO_CAP;
            val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
        } else {
            let val = box_count + PREALLOC;
            val.next_power_of_two().trailing_zeros() - Self::PREALLOC_EXP - 1
        }
    }

    /// Capacity of dynamic shelf `shelf_index`; shelf sizes double.
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        if PREALLOC == 0 {
            Self::MIN_NON_ZERO_CAP << shelf_index
        } else {
            1usize << (shelf_index + Self::PREALLOC_EXP + 1)
        }
    }

    /// Maps a logical index (>= PREALLOC) to `(shelf, slot-within-shelf)`.
    ///
    /// Adding `BIAS` makes shelf boundaries land on powers of two: the MSB
    /// of the biased index selects the shelf, the remaining bits the slot.
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }

    /// Like [`Self::location`], additionally returning how many slots remain
    /// in that shelf from `list_index` to the shelf's end.
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        // `2 << msb` is the biased index of the shelf's end.
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }
1283
    /// Returns a reference to the element at `index` without bounds checks.
    ///
    /// # Safety
    /// `index` must be `< self.len` (the slot must be initialized and its
    /// shelf allocated).
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            if index < PREALLOC {
                &(*self.prealloc_segment.as_ptr())[index]
            } else {
                let (shelf, box_idx) = Self::location(index);
                &*self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Mutable counterpart of [`Self::unchecked_at`].
    ///
    /// # Safety
    /// `index` must be `< self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            if index < PREALLOC {
                &mut (*self.prealloc_segment.as_mut_ptr())[index]
            } else {
                let (shelf, box_idx) = Self::location(index);
                &mut *self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Moves the element at `index` out by bitwise read.
    ///
    /// # Safety
    /// `index` must be `< self.len`, and the caller must ensure the slot is
    /// not read again / double-dropped (e.g. by shrinking `len`).
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            if index < PREALLOC {
                std::ptr::read(&(*self.prealloc_segment.as_ptr())[index])
            } else {
                let (shelf, box_idx) = Self::location(index);
                std::ptr::read(
                    self.dynamic_segments
                        .get_unchecked(shelf)
                        .as_ptr()
                        .add(box_idx),
                )
            }
        }
    }
1347
    /// Ensures enough dynamic shelves exist for `new_capacity` total
    /// elements; never shrinks.
    fn grow_capacity(&mut self, new_capacity: usize) {
        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.dynamic_segments.len() as u32;

        if new_shelf_count > old_shelf_count {
            self.dynamic_segments
                .reserve((new_shelf_count - old_shelf_count) as usize);

            for i in old_shelf_count..new_shelf_count {
                let size = Self::shelf_size(i);
                let layout = Layout::array::<T>(size).expect("Layout overflow");
                // Zero-sized layouts (ZSTs) get a dangling, never-dereferenced
                // allocation stand-in.
                let ptr = if layout.size() == 0 {
                    NonNull::dangling()
                } else {
                    // NOTE(review): allocation failure panics via `expect`;
                    // `std::alloc::handle_alloc_error` would be conventional.
                    let ptr = unsafe { alloc::alloc(layout) };
                    NonNull::new(ptr as *mut T).expect("Allocation failed")
                };
                self.dynamic_segments.push(ptr);
            }
        }
    }

    /// Total capacity provided by the inline segment plus `shelf_count`
    /// dynamic shelves.
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        if shelf_count == 0 {
            PREALLOC
        } else if PREALLOC == 0 {
            (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
        } else {
            (1usize << (Self::PREALLOC_EXP + 1 + shelf_count)) - PREALLOC
        }
    }

    /// Frees shelves beyond what `new_capacity` elements require.
    ///
    /// NOTE(review): if `write_ptr` pointed into a shelf freed here (which
    /// can happen right after popping back to a shelf boundary), it is left
    /// dangling — callers must re-derive the write cursor afterwards.
    fn shrink_capacity(&mut self, new_capacity: usize) {
        if new_capacity <= PREALLOC {
            // Inline storage suffices: drop every shelf.
            self.free_shelves(self.dynamic_segments.len() as u32, 0);
            self.dynamic_segments.clear();
            self.dynamic_segments.shrink_to_fit();
            return;
        }

        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.dynamic_segments.len() as u32;

        if new_shelf_count < old_shelf_count {
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.dynamic_segments.truncate(new_shelf_count as usize);
            self.dynamic_segments.shrink_to_fit();
        }
    }

    /// Deallocates shelves `[to_count, from_count)`; their elements must
    /// already have been dropped.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            // ZST shelves were never really allocated (dangling pointer).
            if layout.size() > 0 {
                unsafe {
                    alloc::dealloc(
                        self.dynamic_segments[i as usize].as_ptr() as *mut u8,
                        layout,
                    );
                }
            }
        }
    }
1418}
1419
impl<T, const PREALLOC: usize> Drop for SegmentedVec<T, PREALLOC> {
    fn drop(&mut self) {
        // Drop all live elements, then release every dynamic shelf.
        self.clear();
        self.free_shelves(self.dynamic_segments.len() as u32, 0);
    }
}
1428
// Adapter that lets the shared sort routines in `sort` index directly into
// the segmented storage via references and raw pointers.
impl<T, const PREALLOC: usize> sort::IndexedAccess<T> for SegmentedVec<T, PREALLOC> {
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        unsafe { self.unchecked_at(index) }
    }

    /// Raw const pointer to element `index`; bounds are only debug-checked.
    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe { (*self.prealloc_segment.as_ptr()).as_ptr().add(index) }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Raw mut pointer to element `index`; bounds are only debug-checked.
    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(index)
            }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Swaps elements `a` and `b`; no-op when the indices are equal.
    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1485
impl<T, const PREALLOC: usize> Default for SegmentedVec<T, PREALLOC> {
    /// Equivalent to [`SegmentedVec::new`].
    fn default() -> Self {
        Self::new()
    }
}

impl<T, const PREALLOC: usize> Index<usize> for SegmentedVec<T, PREALLOC> {
    type Output = T;

    /// Panics with "index out of bounds" when `index >= len`.
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}

impl<T, const PREALLOC: usize> IndexMut<usize> for SegmentedVec<T, PREALLOC> {
    /// Panics with "index out of bounds" when `index >= len`.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}
1505
1506impl<T: Clone, const PREALLOC: usize> Clone for SegmentedVec<T, PREALLOC> {
1507 fn clone(&self) -> Self {
1508 if self.len == 0 {
1509 return Self::new();
1510 }
1511
1512 let mut new_vec = Self::new();
1513 new_vec.reserve(self.len);
1514
1515 if PREALLOC > 0 {
1517 let count = self.len.min(PREALLOC);
1518 let src = unsafe { (*self.prealloc_segment.as_ptr()).as_ptr() };
1519 let dst = unsafe { (*new_vec.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
1520 for i in 0..count {
1521 unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1522 }
1523 new_vec.len = count;
1524 }
1525
1526 if self.len > PREALLOC {
1528 let mut remaining = self.len - PREALLOC;
1529 for shelf in 0..self.dynamic_segments.len() {
1530 let size = Self::shelf_size(shelf as u32);
1531 let count = remaining.min(size);
1532 let src = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
1533 let dst = unsafe { new_vec.dynamic_segments.get_unchecked(shelf).as_ptr() };
1534 for i in 0..count {
1535 unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1536 }
1537 new_vec.len += count;
1538 remaining -= count;
1539 if remaining == 0 {
1540 break;
1541 }
1542 }
1543 }
1544
1545 if new_vec.len < new_vec.capacity() {
1547 new_vec.init_write_ptr();
1548 } else {
1549 new_vec.write_ptr = std::ptr::null_mut();
1550 new_vec.segment_remaining = 0;
1551 }
1552
1553 new_vec
1554 }
1555}
1556
impl<T: std::fmt::Debug, const PREALLOC: usize> std::fmt::Debug for SegmentedVec<T, PREALLOC> {
    /// Formats as a bracketed list of elements.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
1562
1563impl<T: PartialEq, const PREALLOC: usize> PartialEq for SegmentedVec<T, PREALLOC> {
1564 fn eq(&self, other: &Self) -> bool {
1565 if self.len != other.len {
1566 return false;
1567 }
1568 for i in 0..self.len {
1569 if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1570 return false;
1571 }
1572 }
1573 true
1574 }
1575}
1576
impl<T: Eq, const PREALLOC: usize> Eq for SegmentedVec<T, PREALLOC> {}

impl<T, const PREALLOC: usize> FromIterator<T> for SegmentedVec<T, PREALLOC> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let iter = iter.into_iter();
        // Pre-reserve from the lower size hint to reduce shelf allocations.
        let (lower, _) = iter.size_hint();
        let mut vec = Self::new();
        vec.reserve(lower);
        for item in iter {
            vec.push(item);
        }
        vec
    }
}

impl<T, const PREALLOC: usize> Extend<T> for SegmentedVec<T, PREALLOC> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        // Pre-reserve from the lower size hint to reduce shelf allocations.
        let (lower, _) = iter.size_hint();
        self.reserve(lower);
        for item in iter {
            self.push(item);
        }
    }
}
1602
/// Borrowing iterator over a [`SegmentedVec`], walking one contiguous
/// segment at a time so the hot path is a simple pointer bump.
pub struct Iter<'a, T, const PREALLOC: usize> {
    vec: &'a SegmentedVec<T, PREALLOC>,
    // Next element to yield; null until the first segment is entered.
    ptr: *const T,
    // One past the last element of the current segment.
    segment_end: *const T,
    // Logical index of the next element to yield.
    index: usize,
    // Next dynamic shelf to enter once the current segment is exhausted.
    shelf_index: u32,
}

impl<'a, T, const PREALLOC: usize> Iterator for Iter<'a, T, PREALLOC> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment.
        if self.ptr < self.segment_end {
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}

impl<'a, T, const PREALLOC: usize> Iter<'a, T, PREALLOC> {
    /// Advances into the segment containing `index` (inline prealloc first,
    /// then each dynamic shelf in order) and yields its first element.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        if self.index < PREALLOC {
            // Enter (or resume inside) the inline segment.
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_ptr())
                    .as_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            // Clamp to `len` so a partially filled segment stops early.
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // Enter the next dynamic shelf.
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}

impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Iter<'a, T, PREALLOC> {}
1677
1678pub struct IterMut<'a, T, const PREALLOC: usize> {
1680 vec: &'a mut SegmentedVec<T, PREALLOC>,
1681 ptr: *mut T,
1683 segment_end: *mut T,
1685 index: usize,
1687 shelf_index: u32,
1689}
1690
1691impl<'a, T, const PREALLOC: usize> Iterator for IterMut<'a, T, PREALLOC> {
1692 type Item = &'a mut T;
1693
1694 #[inline]
1695 fn next(&mut self) -> Option<Self::Item> {
1696 if self.ptr < self.segment_end {
1697 let result = self.ptr;
1698 self.ptr = unsafe { self.ptr.add(1) };
1699 self.index += 1;
1700 return Some(unsafe { &mut *result });
1701 }
1702 self.next_segment()
1703 }
1704
1705 #[inline]
1706 fn size_hint(&self) -> (usize, Option<usize>) {
1707 let remaining = self.vec.len.saturating_sub(self.index);
1708 (remaining, Some(remaining))
1709 }
1710}
1711
impl<'a, T, const PREALLOC: usize> IterMut<'a, T, PREALLOC> {
    /// Cold path of `IterMut::next`: advance to the next storage segment and
    /// yield its first element. Mirrors `Iter::next_segment` but with mutable
    /// pointers.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        if self.index < PREALLOC {
            // Still inside the inline preallocated segment.
            // SAFETY: `index < len` and `index < PREALLOC`, so the slot is
            // initialized and in-bounds of the inline array.
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            // The segment may only be partially filled; stop at `vec.len`.
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // Move on to the next dynamic shelf; `shelf_index` is bumped
            // after each shelf is consumed.
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            // Clamp to the initialized prefix of the shelf.
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the freshly selected segment.
        // SAFETY: `segment_len >= 1` because `index < vec.len`; the exclusive
        // borrow of `vec` guarantees no aliasing of the returned reference.
        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}
1749
1750impl<'a, T, const PREALLOC: usize> ExactSizeIterator for IterMut<'a, T, PREALLOC> {}
1751
1752impl<T, const PREALLOC: usize> IntoIterator for SegmentedVec<T, PREALLOC> {
1753 type Item = T;
1754 type IntoIter = IntoIter<T, PREALLOC>;
1755
1756 fn into_iter(self) -> Self::IntoIter {
1757 IntoIter {
1758 vec: self,
1759 index: 0,
1760 }
1761 }
1762}
1763
1764impl<'a, T, const PREALLOC: usize> IntoIterator for &'a SegmentedVec<T, PREALLOC> {
1765 type Item = &'a T;
1766 type IntoIter = Iter<'a, T, PREALLOC>;
1767
1768 fn into_iter(self) -> Self::IntoIter {
1769 self.iter()
1770 }
1771}
1772
1773impl<'a, T, const PREALLOC: usize> IntoIterator for &'a mut SegmentedVec<T, PREALLOC> {
1774 type Item = &'a mut T;
1775 type IntoIter = IterMut<'a, T, PREALLOC>;
1776
1777 fn into_iter(self) -> Self::IntoIter {
1778 self.iter_mut()
1779 }
1780}
1781
/// Owning iterator returned by [`SegmentedVec::into_iter`].
pub struct IntoIter<T, const PREALLOC: usize> {
    // The vector being consumed; only elements at `index..vec.len` are still
    // live (earlier slots have already been moved out).
    vec: SegmentedVec<T, PREALLOC>,
    // Logical index of the next element to move out.
    index: usize,
}
1787
1788impl<T, const PREALLOC: usize> Iterator for IntoIter<T, PREALLOC> {
1789 type Item = T;
1790
1791 fn next(&mut self) -> Option<Self::Item> {
1792 if self.index >= self.vec.len {
1793 return None;
1794 }
1795 let value = unsafe { self.vec.unchecked_read(self.index) };
1796 self.index += 1;
1797 Some(value)
1798 }
1799
1800 fn size_hint(&self) -> (usize, Option<usize>) {
1801 let remaining = self.vec.len - self.index;
1802 (remaining, Some(remaining))
1803 }
1804}
1805
1806impl<T, const PREALLOC: usize> ExactSizeIterator for IntoIter<T, PREALLOC> {}
1807
impl<T, const PREALLOC: usize> Drop for IntoIter<T, PREALLOC> {
    /// Drops the elements the iterator never yielded.
    fn drop(&mut self) {
        // Elements before `index` were already moved out by `next`; only the
        // `index..len` tail still owns values.
        for i in self.index..self.vec.len {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }
        // Zero the length so the inner `SegmentedVec`'s own Drop does not
        // drop the same elements again (it would be a double free otherwise).
        self.vec.len = 0;
    }
}
1820
/// Draining iterator over a range of a [`SegmentedVec`]: yields the range's
/// elements by value and, on drop, closes the gap and shrinks the vector.
pub struct Drain<'a, T, const PREALLOC: usize> {
    // Vector being drained; its `len` still includes the drained range until
    // `Drop` runs.
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    // Start of the drained range (fixed; shift target on drop).
    range_start: usize,
    // One-past-the-end of the not-yet-consumed range; moves left on
    // `next_back`.
    range_end: usize,
    // Next front element to yield; moves right on `next`.
    index: usize,
}
1830
1831impl<'a, T, const PREALLOC: usize> Iterator for Drain<'a, T, PREALLOC> {
1832 type Item = T;
1833
1834 fn next(&mut self) -> Option<Self::Item> {
1835 if self.index >= self.range_end {
1836 None
1837 } else {
1838 let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1839 self.index += 1;
1840 Some(value)
1841 }
1842 }
1843
1844 fn size_hint(&self) -> (usize, Option<usize>) {
1845 let remaining = self.range_end - self.index;
1846 (remaining, Some(remaining))
1847 }
1848}
1849
1850impl<'a, T, const PREALLOC: usize> DoubleEndedIterator for Drain<'a, T, PREALLOC> {
1851 fn next_back(&mut self) -> Option<Self::Item> {
1852 if self.index >= self.range_end {
1853 None
1854 } else {
1855 self.range_end -= 1;
1856 Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1857 }
1858 }
1859}
1860
1861impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Drain<'a, T, PREALLOC> {}
1862
impl<'a, T, const PREALLOC: usize> Drop for Drain<'a, T, PREALLOC> {
    /// Finishes the drain: drops unconsumed elements, shifts the tail down
    /// over the vacated range, and shrinks the vector's length.
    fn drop(&mut self) {
        // Drop whatever the caller did not consume (elements read by `next`
        // / `next_back` were moved out and must not be dropped here).
        // NOTE(review): if a `drop_in_place` here panics, the tail shift and
        // length fix-up below are skipped — TODO confirm the intended
        // panic-safety story (std's Vec::drain leaks on panic instead).
        for i in self.index..self.range_end {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }

        let original_range_end = self.range_end;
        let original_len = self.vec.len;
        // Size of the vacated gap: [range_start, range_end).
        let drain_count = original_range_end - self.range_start;

        // Shift the tail element-by-element into the gap. Per-element copies
        // are needed because the segmented storage is not contiguous; each
        // source/destination pair is a single non-overlapping slot.
        for i in 0..(original_len - original_range_end) {
            unsafe {
                let src = self.vec.unchecked_at(original_range_end + i) as *const T;
                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }

        // Shrink: the slots past the new length are now logically vacant.
        self.vec.len = original_len - drain_count;
    }
}
1888
#[cfg(test)]
mod tests {
    use super::*;

    // ---- Basic container operations -------------------------------------

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    #[test]
    fn test_stable_pointers() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        // Grow far past the preallocated segment; a segmented vec must never
        // move existing elements.
        for i in 2..1000 {
            vec.push(i);
        }

        assert_eq!(unsafe { *ptr }, 1);
    }

    // ---- Iteration ------------------------------------------------------

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32, 4> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    // ---- PREALLOC parameter coverage ------------------------------------

    #[test]
    fn test_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        assert_eq!(vec.pop(), Some(99));
        assert_eq!(vec.len(), 99);
    }

    #[test]
    fn test_various_prealloc_sizes() {
        fn test_prealloc<const N: usize>() {
            let mut vec: SegmentedVec<i32, N> = SegmentedVec::new();
            for i in 0..100 {
                vec.push(i);
            }
            for i in 0..100 {
                assert_eq!(vec[i], i as i32);
            }
        }

        test_prealloc::<0>();
        test_prealloc::<1>();
        test_prealloc::<2>();
        test_prealloc::<4>();
        test_prealloc::<8>();
        test_prealloc::<16>();
    }

    // ---- Trait impls ----------------------------------------------------

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter, 4> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        // Every element must be dropped exactly once when the vec goes away.
        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    // ---- Sorting --------------------------------------------------------

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        // Descending comparator.
        vec.sort_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    #[test]
    fn test_sort_stable() {
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item, 4> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        // Equal keys must keep their original relative order.
        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        // Descending comparator.
        vec.sort_unstable_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        // Unstable sort gives no order guarantee for equal keys; only check
        // that the key order is non-decreasing.
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_with_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        // Pointer to slot 0; sorting moves values, not storage.
        let ptr = &vec[0] as *const i32;

        vec.sort();

        // Slot 0 now holds the minimum (1) and the pointer is still valid.
        assert_eq!(unsafe { *ptr }, 1);
    }

    // ---- Slice views ----------------------------------------------------

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        {
            // Sort only the middle sub-range; the rest must stay untouched.
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    // ---- Capacity & bulk-copy extension ---------------------------------

    #[test]
    fn test_min_non_zero_cap() {
        // With PREALLOC = 0 the first allocation uses MIN_NON_ZERO_CAP,
        // which depends on the element size (8 for 1-byte, 4 for <=1024).
        let mut vec_u8: SegmentedVec<u8, 0> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        let mut vec_i32: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        // Zero-prealloc vec, bulk copy spanning several shelves.
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Prealloc vec, bulk copy from empty state.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Bulk copy appended after individual pushes.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        // Empty slice is a no-op.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        // Copy ending mid-segment, then keep pushing.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]);
        assert_eq!(vec.len(), 3);
        vec.push(4);
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }

        // Copy ending exactly at a segment boundary, then push.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3, 4]);
        assert_eq!(vec.len(), 4);
        vec.push(5);
        assert_eq!(vec.len(), 5);
        assert_eq!(vec[4], 5);
    }
}