//! A growable vector that never moves elements once written: storage is a
//! small inline segment of `PREALLOC` slots plus a list of geometrically
//! growing heap "shelves", so references remain valid across pushes.

mod slice;
mod sort;

pub use slice::{
    Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
    Windows,
};

use std::alloc::{self, Layout};
use std::cmp::Ordering;
use std::marker::PhantomData;
use std::mem::MaybeUninit;
use std::ops::{Index, IndexMut};
use std::ptr::NonNull;

pub struct SegmentedVec<T, const PREALLOC: usize = 0> {
    prealloc_segment: MaybeUninit<[T; PREALLOC]>,
    dynamic_segments: Vec<NonNull<T>>,
    len: usize,
    /// Slot where the next `push` writes, or null when no segment is active.
    write_ptr: *mut T,
    /// One past the last slot of the segment `write_ptr` points into.
    segment_end: *mut T,
    _marker: PhantomData<T>,
}

unsafe impl<T: Send, const PREALLOC: usize> Send for SegmentedVec<T, PREALLOC> {}
unsafe impl<T: Sync, const PREALLOC: usize> Sync for SegmentedVec<T, PREALLOC> {}

impl<T, const PREALLOC: usize> SegmentedVec<T, PREALLOC> {
    const PREALLOC_EXP: u32 = if PREALLOC == 0 {
        0
    } else {
        assert!(
            PREALLOC.is_power_of_two(),
            "PREALLOC must be 0 or a power of 2"
        );
        PREALLOC.trailing_zeros()
    };

    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();

    const BIAS: usize = if PREALLOC > 0 {
        PREALLOC
    } else {
        Self::MIN_NON_ZERO_CAP
    };

    const SHELF_OFFSET: u32 = if PREALLOC == 0 {
        Self::MIN_CAP_EXP
    } else {
        Self::PREALLOC_EXP + 1
    };

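    // Shelf layout sketch (illustrative, derived from the constants above):
    // with PREALLOC = 4, indices 0..4 live in the inline segment and dynamic
    // shelf i holds 1 << (i + PREALLOC_EXP + 1) elements, i.e. shelves of
    // 8, 16, 32, ... With PREALLOC = 0 and MIN_NON_ZERO_CAP = 4, shelves hold
    // 4, 8, 16, ... Adding BIAS to an index makes every segment boundary a
    // power of two, so `location` below can find the shelf with ilog2 alone.
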
    #[inline]
    pub const fn new() -> Self {
        Self {
            prealloc_segment: MaybeUninit::uninit(),
            dynamic_segments: Vec::new(),
            len: 0,
            write_ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            _marker: PhantomData,
        }
    }

    #[inline]
    fn init_write_ptr(&mut self) {
        if PREALLOC > 0 && self.len < PREALLOC {
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = unsafe { base.add(self.len) };
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else if !self.dynamic_segments.is_empty() {
            let (shelf, box_idx) = Self::location(self.len);
            let shelf_size = Self::shelf_size(shelf as u32);
            let base = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
            self.write_ptr = unsafe { base.add(box_idx) };
            self.segment_end = unsafe { base.add(shelf_size) };
        } else if PREALLOC > 0 {
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.dynamic_segments.len() as u32)
    }

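    /// Appends an element. Growth allocates a new shelf but never moves
    /// existing elements, so earlier references stay valid. Illustrative
    /// usage (`segmented_vec` is an assumed crate name, hence `ignore`):
    ///
    /// ```ignore
    /// use segmented_vec::SegmentedVec; // hypothetical crate path
    ///
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.push(1);
    /// v.push(2);
    /// assert_eq!(v.len(), 2);
    /// ```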
    #[inline]
    pub fn push(&mut self, value: T) {
        if self.write_ptr < self.segment_end {
            unsafe {
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.len += 1;
            return;
        }

        self.push_slow(value);
    }

    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        let new_len = self.len + 1;

        if PREALLOC > 0 && self.len == 0 {
            unsafe {
                let base = (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr();
                std::ptr::write(base, value);
                self.write_ptr = base.add(1);
                self.segment_end = base.add(PREALLOC);
            }
            self.len = 1;
            return;
        }

        let biased = self.len + Self::BIAS;
        let shelf = (biased.trailing_zeros() - Self::SHELF_OFFSET) as usize;
        let shelf_size = Self::shelf_size(shelf as u32);

        self.grow_capacity(new_len);

        unsafe {
            let base = self.dynamic_segments.get_unchecked(shelf).as_ptr();
            std::ptr::write(base, value);
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len = new_len;
    }

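    /// Removes and returns the last element, or `None` if the vector is
    /// empty. Illustrative usage (crate path assumed, as above):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.push(7);
    /// assert_eq!(v.pop(), Some(7));
    /// assert_eq!(v.pop(), None);
    /// ```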
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }
        self.len -= 1;

        let biased = self.len + Self::BIAS;
        // If the popped element sits at a segment start, `write_ptr` cannot
        // simply step back across the boundary; re-derive it from the new
        // length. The null check covers the cursor having been cleared while
        // the vector was exactly full (e.g. by `extend_from_slice`).
        if biased & (biased - 1) == 0 || self.write_ptr.is_null() {
            self.init_write_ptr();
            Some(unsafe { self.unchecked_read(self.len) })
        } else {
            unsafe {
                self.write_ptr = self.write_ptr.sub(1);
            }
            Some(unsafe { std::ptr::read(self.write_ptr) })
        }
    }

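    /// Returns a reference to the element at `index`, or `None` when out of
    /// bounds. Lookup computes the owning segment with an `ilog2`, so it is
    /// O(1) but slightly dearer than `Vec` indexing. Example (crate path
    /// assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend(0..10);
    /// assert_eq!(v.get(3), Some(&3));
    /// assert_eq!(v.get(10), None);
    /// ```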
    #[inline]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len {
            Some(unsafe { self.unchecked_at(index) })
        } else {
            None
        }
    }

    #[inline]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len {
            Some(unsafe { self.unchecked_at_mut(index) })
        } else {
            None
        }
    }

    #[inline]
    pub fn first(&self) -> Option<&T> {
        self.get(0)
    }

    #[inline]
    pub fn first_mut(&mut self) -> Option<&mut T> {
        self.get_mut(0)
    }

    #[inline]
    pub fn last(&self) -> Option<&T> {
        if self.len == 0 {
            None
        } else {
            self.get(self.len - 1)
        }
    }

    #[inline]
    pub fn last_mut(&mut self) -> Option<&mut T> {
        if self.len == 0 {
            None
        } else {
            self.get_mut(self.len - 1)
        }
    }

    #[inline]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(x)
    }

    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }

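    /// Binary-searches a sorted vector for `x`, with the same contract as
    /// `slice::binary_search`: `Ok(i)` for a match, `Err(i)` for the
    /// insertion point. Example (crate path assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend([1, 3, 5, 7]);
    /// assert_eq!(v.binary_search(&5), Ok(2));
    /// assert_eq!(v.binary_search(&4), Err(2));
    /// ```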
    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
    where
        T: Ord,
    {
        self.as_slice().binary_search(x)
    }

    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> Ordering,
    {
        self.as_slice().binary_search_by(f)
    }

    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> B,
        B: Ord,
    {
        self.as_slice().binary_search_by_key(b, f)
    }

    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }

    pub fn clear(&mut self) {
        if self.len == 0 {
            return;
        }

        if std::mem::needs_drop::<T>() {
            if PREALLOC > 0 {
                let count = self.len.min(PREALLOC);
                let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
                unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
            }

            if self.len > PREALLOC {
                let mut remaining = self.len - PREALLOC;
                for shelf in 0..self.dynamic_segments.len() {
                    let size = Self::shelf_size(shelf as u32);
                    let count = remaining.min(size);
                    let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                    unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
                    remaining -= count;
                    if remaining == 0 {
                        break;
                    }
                }
            }
        }

        self.len = 0;
        if PREALLOC > 0 {
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

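    /// Shortens the vector to `new_len`, dropping the tail in place; a no-op
    /// when `new_len >= self.len()`. Capacity is unchanged. Example (crate
    /// path assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend(0..10);
    /// v.truncate(3);
    /// assert_eq!(v.to_vec(), vec![0, 1, 2]);
    /// ```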
    pub fn truncate(&mut self, new_len: usize) {
        if new_len >= self.len {
            return;
        }

        if std::mem::needs_drop::<T>() {
            if PREALLOC > 0 && new_len < PREALLOC {
                let start = new_len;
                let end = self.len.min(PREALLOC);
                if start < end {
                    let ptr = unsafe {
                        (*self.prealloc_segment.as_mut_ptr())
                            .as_mut_ptr()
                            .add(start)
                    };
                    unsafe {
                        std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, end - start))
                    };
                }
            }

            if self.len > PREALLOC {
                let dynamic_new_len = new_len.saturating_sub(PREALLOC);
                let dynamic_old_len = self.len - PREALLOC;

                if dynamic_new_len < dynamic_old_len {
                    let mut pos = 0usize;
                    for shelf in 0..self.dynamic_segments.len() {
                        let size = Self::shelf_size(shelf as u32);
                        let seg_end = pos + size;

                        let drop_start = dynamic_new_len.max(pos);
                        let drop_end = dynamic_old_len.min(seg_end);

                        if drop_start < drop_end {
                            let offset = drop_start - pos;
                            let count = drop_end - drop_start;
                            let ptr = unsafe {
                                self.dynamic_segments
                                    .get_unchecked(shelf)
                                    .as_ptr()
                                    .add(offset)
                            };
                            unsafe {
                                std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count))
                            };
                        }

                        pos = seg_end;
                        if pos >= dynamic_old_len {
                            break;
                        }
                    }
                }
            }
        }

        self.len = new_len;
        if new_len > 0 {
            self.init_write_ptr();
        } else if PREALLOC > 0 {
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    pub fn reserve(&mut self, additional: usize) {
        let old_capacity = self.capacity();
        self.grow_capacity(self.len + additional);
        if old_capacity == 0 && self.capacity() > 0 && self.segment_end.is_null() {
            self.init_write_ptr();
        }
    }

    pub fn shrink_to_fit(&mut self) {
        self.shrink_capacity(self.len);
    }

    #[inline]
    pub fn iter(&self) -> Iter<'_, T, PREALLOC> {
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T, PREALLOC> {
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }

    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T, PREALLOC> {
        SegmentedSlice::new(self)
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T, PREALLOC> {
        SegmentedSliceMut::new(self)
    }

    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    #[inline]
    pub fn slice_mut(
        &mut self,
        range: std::ops::Range<usize>,
    ) -> SegmentedSliceMut<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }

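    /// Clones every element of `other` onto the end, filling each segment in
    /// bulk rather than pushing one element at a time. Example (crate path
    /// assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend_from_slice(&[1, 2, 3]);
    /// assert_eq!(v.to_vec(), vec![1, 2, 3]);
    /// ```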
    pub fn extend_from_slice(&mut self, other: &[T])
    where
        T: Clone,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        if PREALLOC > 0 && self.len < PREALLOC {
            let prealloc_remaining = PREALLOC - self.len;
            let to_copy = src.len().min(prealloc_remaining);
            let dest = unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len)
            };
            for i in 0..to_copy {
                unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
            }
            self.len += to_copy;
            src = &src[to_copy..];
        }

        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            let dest = unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            };
            for i in 0..to_copy {
                unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
            }
            self.len += to_copy;
            src = &src[to_copy..];
        }

        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    pub fn extend_from_copy_slice(&mut self, other: &[T])
    where
        T: Copy,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        if PREALLOC > 0 && self.len < PREALLOC {
            let prealloc_remaining = PREALLOC - self.len;
            let to_copy = src.len().min(prealloc_remaining);
            unsafe {
                let dest = (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            unsafe {
                let dest = self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }

    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }

    pub fn is_sorted(&self) -> bool
    where
        T: PartialOrd,
    {
        self.as_slice().is_sorted()
    }

    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: FnMut(&T, &T) -> bool,
    {
        self.as_slice().is_sorted_by(compare)
    }

    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        self.as_slice().is_sorted_by_key(f)
    }

    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        self.as_slice().partition_point(pred)
    }

    pub fn rotate_left(&mut self, mid: usize) {
        self.as_mut_slice().rotate_left(mid);
    }

    pub fn rotate_right(&mut self, k: usize) {
        self.as_mut_slice().rotate_right(k);
    }

    pub fn with_capacity(capacity: usize) -> Self {
        let mut vec = Self::new();
        vec.reserve(capacity);
        vec
    }

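    /// Inserts `element` at `index`, shifting later elements right by one.
    /// Because segments are not contiguous this is done with O(len - index)
    /// swaps, costlier than `Vec`'s memmove-based insert. Example (crate
    /// path assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend([1, 3]);
    /// v.insert(1, 2);
    /// assert_eq!(v.to_vec(), vec![1, 2, 3]);
    /// ```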
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len);
        self.push(element);
        if index < self.len - 1 {
            for i in (index..self.len - 1).rev() {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
            }
        }
    }

    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        for i in index..self.len - 1 {
            unsafe {
                let ptr_a = self.unchecked_at_mut(i) as *mut T;
                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }

    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        let last_index = self.len - 1;
        if index != last_index {
            unsafe {
                let ptr_a = self.unchecked_at_mut(index) as *mut T;
                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }

    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at_mut(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }

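    /// Removes consecutive elements for which `same_bucket` returns `true`,
    /// keeping the first of each run (so it fully deduplicates only when
    /// equal elements are adjacent), mirroring `Vec::dedup_by`. Example
    /// (crate path assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend([1, 1, 2, 2, 2, 3]);
    /// v.dedup_by(|a, b| a == b);
    /// assert_eq!(v.to_vec(), vec![1, 2, 3]);
    /// ```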
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        if self.len <= 1 {
            return;
        }
        let mut write = 1;
        for read in 1..self.len {
            let should_keep = unsafe {
                let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
                let curr_ptr = self.unchecked_at_mut(read) as *mut T;
                !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
            };
            if should_keep {
                if read != write {
                    unsafe {
                        let ptr_src = self.unchecked_at_mut(read) as *mut T;
                        let ptr_dst = self.unchecked_at_mut(write) as *mut T;
                        std::ptr::swap(ptr_dst, ptr_src);
                    }
                }
                write += 1;
            } else {
                unsafe {
                    std::ptr::drop_in_place(self.unchecked_at_mut(read));
                }
            }
        }
        self.len = write;
        // The write cursor is stale after shortening; re-derive it so the
        // next push lands at the new end. (If nothing was removed, the old
        // cursor is still valid and `len == capacity` is possible.)
        if self.len < self.capacity() {
            self.init_write_ptr();
        }
    }

    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }

    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }

    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }

    pub fn append(&mut self, other: &mut Self) {
        let other_len = other.len;
        self.reserve(other_len);
        let start = self.len;
        while let Some(item) = other.pop() {
            self.push(item);
        }
        // Popping reversed `other`'s order; reverse the appended tail back.
        let mut left = start;
        let mut right = self.len;
        while left < right {
            right -= 1;
            if left < right {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
                left += 1;
            }
        }
    }

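    /// Splits the vector at `at`, returning a new vector holding elements
    /// `at..len` and leaving `self` with `0..at`. Example (crate path
    /// assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend(0..6);
    /// let tail = v.split_off(4);
    /// assert_eq!(v.to_vec(), vec![0, 1, 2, 3]);
    /// assert_eq!(tail.to_vec(), vec![4, 5]);
    /// ```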
    pub fn split_off(&mut self, at: usize) -> Self {
        assert!(at <= self.len);
        let mut other = Self::new();
        other.reserve(self.len - at);
        for i in at..self.len {
            other.push(unsafe { self.unchecked_read(i) });
        }
        self.len = at;
        // Re-derive the write cursor for the shortened vector.
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
        other
    }

    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T, PREALLOC> {
        self.as_slice().chunks(chunk_size)
    }

    pub fn windows(&self, size: usize) -> Windows<'_, T, PREALLOC> {
        self.as_slice().windows(size)
    }

    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T, PREALLOC> {
        self.as_slice().rchunks(chunk_size)
    }

    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T, PREALLOC> {
        self.as_slice().chunks_exact(chunk_size)
    }

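    /// Removes the elements in `range` and yields them by value; when the
    /// returned `Drain` is dropped, the tail is shifted down to close the
    /// gap. Example (crate path assumed):
    ///
    /// ```ignore
    /// # use segmented_vec::SegmentedVec; // hypothetical crate path
    /// let mut v: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// v.extend(0..6);
    /// let drained: Vec<i32> = v.drain(1..3).collect();
    /// assert_eq!(drained, vec![1, 2]);
    /// assert_eq!(v.to_vec(), vec![0, 3, 4, 5]);
    /// ```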
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            tail_start: range.end,
            index: range.start,
        }
    }

    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.as_slice().to_vec()
    }

    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        if PREALLOC == 0 {
            let val = box_count + Self::MIN_NON_ZERO_CAP;
            val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
        } else {
            let val = box_count + PREALLOC;
            val.next_power_of_two().trailing_zeros() - Self::PREALLOC_EXP - 1
        }
    }

    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        if PREALLOC == 0 {
            Self::MIN_NON_ZERO_CAP << shelf_index
        } else {
            1usize << (shelf_index + Self::PREALLOC_EXP + 1)
        }
    }

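    // Worked example for the index math below (PREALLOC = 4, so BIAS = 4 and
    // SHELF_OFFSET = 3): index 9 gives biased = 13, msb = 3, shelf = 0, and
    // box_idx = 13 ^ 8 = 5 -- element 9 is slot 5 of the first dynamic
    // shelf, which holds the 8 elements with indices 4..12.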
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }

    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }

    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            if index < PREALLOC {
                &(*self.prealloc_segment.as_ptr())[index]
            } else {
                let (shelf, box_idx) = Self::location(index);
                &*self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            if index < PREALLOC {
                &mut (*self.prealloc_segment.as_mut_ptr())[index]
            } else {
                let (shelf, box_idx) = Self::location(index);
                &mut *self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            if index < PREALLOC {
                std::ptr::read(&(*self.prealloc_segment.as_ptr())[index])
            } else {
                let (shelf, box_idx) = Self::location(index);
                std::ptr::read(
                    self.dynamic_segments
                        .get_unchecked(shelf)
                        .as_ptr()
                        .add(box_idx),
                )
            }
        }
    }

    fn grow_capacity(&mut self, new_capacity: usize) {
        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.dynamic_segments.len() as u32;

        if new_shelf_count > old_shelf_count {
            self.dynamic_segments
                .reserve((new_shelf_count - old_shelf_count) as usize);

            for i in old_shelf_count..new_shelf_count {
                let size = Self::shelf_size(i);
                let layout = Layout::array::<T>(size).expect("Layout overflow");
                let ptr = if layout.size() == 0 {
                    NonNull::dangling()
                } else {
                    let ptr = unsafe { alloc::alloc(layout) };
                    NonNull::new(ptr as *mut T).expect("Allocation failed")
                };
                self.dynamic_segments.push(ptr);
            }
        }
    }

    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        if shelf_count == 0 {
            PREALLOC
        } else if PREALLOC == 0 {
            (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
        } else {
            (1usize << (Self::PREALLOC_EXP + 1 + shelf_count)) - PREALLOC
        }
    }

    fn shrink_capacity(&mut self, new_capacity: usize) {
        if new_capacity <= PREALLOC {
            if self.dynamic_segments.is_empty() {
                return;
            }
            self.free_shelves(self.dynamic_segments.len() as u32, 0);
            self.dynamic_segments.clear();
            self.dynamic_segments.shrink_to_fit();
        } else {
            let new_shelf_count = Self::shelf_count(new_capacity);
            let old_shelf_count = self.dynamic_segments.len() as u32;

            if new_shelf_count >= old_shelf_count {
                return;
            }
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.dynamic_segments.truncate(new_shelf_count as usize);
            self.dynamic_segments.shrink_to_fit();
        }

        // The write cursor may have pointed into a freed shelf; re-derive it.
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            if layout.size() > 0 {
                unsafe {
                    alloc::dealloc(
                        self.dynamic_segments[i as usize].as_ptr() as *mut u8,
                        layout,
                    );
                }
            }
        }
    }
}

impl<T, const PREALLOC: usize> Drop for SegmentedVec<T, PREALLOC> {
    fn drop(&mut self) {
        self.clear();
        self.free_shelves(self.dynamic_segments.len() as u32, 0);
    }
}

impl<T, const PREALLOC: usize> sort::IndexedAccess<T> for SegmentedVec<T, PREALLOC> {
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        unsafe { self.unchecked_at(index) }
    }

    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe { (*self.prealloc_segment.as_ptr()).as_ptr().add(index) }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(index)
            }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}

impl<T, const PREALLOC: usize> Default for SegmentedVec<T, PREALLOC> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T, const PREALLOC: usize> Index<usize> for SegmentedVec<T, PREALLOC> {
    type Output = T;

    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}

impl<T, const PREALLOC: usize> IndexMut<usize> for SegmentedVec<T, PREALLOC> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}

impl<T: Clone, const PREALLOC: usize> Clone for SegmentedVec<T, PREALLOC> {
    fn clone(&self) -> Self {
        if self.len == 0 {
            return Self::new();
        }

        let mut new_vec = Self::new();
        new_vec.reserve(self.len);

        if PREALLOC > 0 {
            let count = self.len.min(PREALLOC);
            let src = unsafe { (*self.prealloc_segment.as_ptr()).as_ptr() };
            let dst = unsafe { (*new_vec.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            for i in 0..count {
                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
            }
            new_vec.len = count;
        }

        if self.len > PREALLOC {
            let mut remaining = self.len - PREALLOC;
            for shelf in 0..self.dynamic_segments.len() {
                let size = Self::shelf_size(shelf as u32);
                let count = remaining.min(size);
                let src = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                let dst = unsafe { new_vec.dynamic_segments.get_unchecked(shelf).as_ptr() };
                for i in 0..count {
                    unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
                }
                new_vec.len += count;
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        if new_vec.len < new_vec.capacity() {
            new_vec.init_write_ptr();
        } else {
            new_vec.write_ptr = std::ptr::null_mut();
            new_vec.segment_end = std::ptr::null_mut();
        }

        new_vec
    }
}

impl<T: std::fmt::Debug, const PREALLOC: usize> std::fmt::Debug for SegmentedVec<T, PREALLOC> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<T: PartialEq, const PREALLOC: usize> PartialEq for SegmentedVec<T, PREALLOC> {
    fn eq(&self, other: &Self) -> bool {
        if self.len != other.len {
            return false;
        }
        for i in 0..self.len {
            if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
                return false;
            }
        }
        true
    }
}

impl<T: Eq, const PREALLOC: usize> Eq for SegmentedVec<T, PREALLOC> {}

impl<T, const PREALLOC: usize> FromIterator<T> for SegmentedVec<T, PREALLOC> {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let iter = iter.into_iter();
        let (lower, _) = iter.size_hint();
        let mut vec = Self::new();
        vec.reserve(lower);
        for item in iter {
            vec.push(item);
        }
        vec
    }
}

impl<T, const PREALLOC: usize> Extend<T> for SegmentedVec<T, PREALLOC> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        let (lower, _) = iter.size_hint();
        self.reserve(lower);
        for item in iter {
            self.push(item);
        }
    }
}

pub struct Iter<'a, T, const PREALLOC: usize> {
    vec: &'a SegmentedVec<T, PREALLOC>,
    ptr: *const T,
    segment_end: *const T,
    index: usize,
    shelf_index: u32,
}

impl<'a, T, const PREALLOC: usize> Iterator for Iter<'a, T, PREALLOC> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr < self.segment_end {
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}

impl<'a, T, const PREALLOC: usize> Iter<'a, T, PREALLOC> {
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        if self.index < PREALLOC {
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_ptr())
                    .as_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}

impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Iter<'a, T, PREALLOC> {}

pub struct IterMut<'a, T, const PREALLOC: usize> {
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    ptr: *mut T,
    segment_end: *mut T,
    index: usize,
    shelf_index: u32,
}

impl<'a, T, const PREALLOC: usize> Iterator for IterMut<'a, T, PREALLOC> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr < self.segment_end {
            let result = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(unsafe { &mut *result });
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}

impl<'a, T, const PREALLOC: usize> IterMut<'a, T, PREALLOC> {
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        if self.index < PREALLOC {
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}

impl<'a, T, const PREALLOC: usize> ExactSizeIterator for IterMut<'a, T, PREALLOC> {}

impl<T, const PREALLOC: usize> IntoIterator for SegmentedVec<T, PREALLOC> {
    type Item = T;
    type IntoIter = IntoIter<T, PREALLOC>;

    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: self,
            index: 0,
        }
    }
}

impl<'a, T, const PREALLOC: usize> IntoIterator for &'a SegmentedVec<T, PREALLOC> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T, PREALLOC>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T, const PREALLOC: usize> IntoIterator for &'a mut SegmentedVec<T, PREALLOC> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T, PREALLOC>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

pub struct IntoIter<T, const PREALLOC: usize> {
    vec: SegmentedVec<T, PREALLOC>,
    index: usize,
}

impl<T, const PREALLOC: usize> Iterator for IntoIter<T, PREALLOC> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.vec.len {
            return None;
        }
        let value = unsafe { self.vec.unchecked_read(self.index) };
        self.index += 1;
        Some(value)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len - self.index;
        (remaining, Some(remaining))
    }
}

impl<T, const PREALLOC: usize> ExactSizeIterator for IntoIter<T, PREALLOC> {}

impl<T, const PREALLOC: usize> Drop for IntoIter<T, PREALLOC> {
    fn drop(&mut self) {
        for i in self.index..self.vec.len {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }
        self.vec.len = 0;
    }
}

pub struct Drain<'a, T, const PREALLOC: usize> {
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    range_start: usize,
    /// Current (exclusive) end of the unconsumed range; `next_back` moves it.
    range_end: usize,
    /// Original end of the drained range, i.e. where the kept tail begins.
    tail_start: usize,
    index: usize,
}

impl<'a, T, const PREALLOC: usize> Iterator for Drain<'a, T, PREALLOC> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.range_end {
            None
        } else {
            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
            self.index += 1;
            Some(value)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.range_end - self.index;
        (remaining, Some(remaining))
    }
}

impl<'a, T, const PREALLOC: usize> DoubleEndedIterator for Drain<'a, T, PREALLOC> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.index >= self.range_end {
            None
        } else {
            self.range_end -= 1;
            Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
        }
    }
}

impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Drain<'a, T, PREALLOC> {}

impl<'a, T, const PREALLOC: usize> Drop for Drain<'a, T, PREALLOC> {
    fn drop(&mut self) {
        // Drop any elements of the range the caller did not consume.
        for i in self.index..self.range_end {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }

        let original_len = self.vec.len;
        let drain_count = self.tail_start - self.range_start;
        if drain_count == 0 {
            return;
        }

        // Shift the kept tail down to close the gap.
        for i in 0..(original_len - self.tail_start) {
            unsafe {
                let src = self.vec.unchecked_at(self.tail_start + i) as *const T;
                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }

        self.vec.len = original_len - drain_count;
        // Re-derive the write cursor for the shortened vector.
        if self.vec.len < self.vec.capacity() {
            self.vec.init_write_ptr();
        } else {
            self.vec.write_ptr = std::ptr::null_mut();
            self.vec.segment_end = std::ptr::null_mut();
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    #[test]
    fn test_stable_pointers() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        for i in 2..1000 {
            vec.push(i);
        }

        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32, 4> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        assert_eq!(vec.pop(), Some(99));
        assert_eq!(vec.len(), 99);
    }

    #[test]
    fn test_various_prealloc_sizes() {
        fn test_prealloc<const N: usize>() {
            let mut vec: SegmentedVec<i32, N> = SegmentedVec::new();
            for i in 0..100 {
                vec.push(i);
            }
            for i in 0..100 {
                assert_eq!(vec[i], i as i32);
            }
        }

        test_prealloc::<0>();
        test_prealloc::<1>();
        test_prealloc::<2>();
        test_prealloc::<4>();
        test_prealloc::<8>();
        test_prealloc::<16>();
    }

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter, 4> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_by(|a, b| b.cmp(a)); // descending
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    #[test]
    fn test_sort_stable() {
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item, 4> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable_by(|a, b| b.cmp(a)); // descending
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_with_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        let ptr = &vec[0] as *const i32;

        vec.sort();

        // Sorting moves values between slots, but the slot itself does not
        // move; after sorting, slot 0 holds the smallest element.
        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_min_non_zero_cap() {
        let mut vec_u8: SegmentedVec<u8, 0> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        let mut vec_i32: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]);
        assert_eq!(vec.len(), 3);
        vec.push(4);
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3, 4]);
        assert_eq!(vec.len(), 4);
        vec.push(5);
        assert_eq!(vec.len(), 5);
        assert_eq!(vec[4], 5);
    }
}
2598}