1mod slice;
28mod sort;
29
30pub use slice::{
31 Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32 Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::mem::MaybeUninit;
39use std::ops::{Index, IndexMut};
40use std::ptr::NonNull;
41
/// A growable vector that stores elements in progressively larger,
/// individually allocated segments ("shelves"), so pushes never move
/// existing elements and references remain stable across growth.
///
/// When `PREALLOC > 0`, the first `PREALLOC` elements live inline in the
/// struct itself; all further elements live in heap shelves whose sizes
/// double from one shelf to the next.
pub struct SegmentedVec<T, const PREALLOC: usize = 0> {
    // Inline storage for the first PREALLOC elements; only the first
    // `len.min(PREALLOC)` slots are initialized.
    prealloc_segment: MaybeUninit<[T; PREALLOC]>,
    // One pointer per heap shelf; shelf `i` holds `shelf_size(i)` slots.
    dynamic_segments: Vec<NonNull<T>>,
    // Number of initialized elements across all segments.
    len: usize,
    // Cached cursor into the segment currently being filled: the next push
    // writes here (null when no segment is active).
    write_ptr: *mut T,
    // One-past-the-end of the current segment (fast-path bound for `push`).
    segment_end: *mut T,
    // First slot of the current segment (fast-path bound for `pop`).
    segment_base: *mut T,
    // Declares ownership of `T` values for drop-check purposes.
    _marker: PhantomData<T>,
}

// SAFETY: the raw pointers only reference memory this vector allocated and
// exclusively owns, so cross-thread transfer/sharing is sound exactly when
// `T` itself is Send/Sync.
unsafe impl<T: Send, const PREALLOC: usize> Send for SegmentedVec<T, PREALLOC> {}
unsafe impl<T: Sync, const PREALLOC: usize> Sync for SegmentedVec<T, PREALLOC> {}
63
64impl<T, const PREALLOC: usize> SegmentedVec<T, PREALLOC> {
    /// log2 of `PREALLOC` (0 when there is no inline segment). Compile-time
    /// guard: a non-zero `PREALLOC` must be a power of two so the biased
    /// index arithmetic below stays branch-free.
    const PREALLOC_EXP: u32 = if PREALLOC == 0 {
        0
    } else {
        assert!(
            PREALLOC.is_power_of_two(),
            "PREALLOC must be 0 or a power of 2"
        );
        PREALLOC.trailing_zeros()
    };

    /// Size of the first heap shelf when `PREALLOC == 0`; mirrors the sizing
    /// heuristic std uses (larger first allocation for tiny `T`).
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    // log2 of MIN_NON_ZERO_CAP (MIN_NON_ZERO_CAP is always a power of two).
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();

    /// Bias added to a logical index so the biased value's bit pattern
    /// directly encodes (shelf, offset-within-shelf).
    const BIAS: usize = if PREALLOC > 0 {
        PREALLOC
    } else {
        Self::MIN_NON_ZERO_CAP
    };

    /// Exponent subtracted from the biased index's MSB position to obtain
    /// the shelf number.
    const SHELF_OFFSET: u32 = if PREALLOC == 0 {
        Self::MIN_CAP_EXP
    } else {
        Self::PREALLOC_EXP + 1
    };

    /// Creates an empty vector. Allocates nothing; the cursor pointers stay
    /// null until first use (forcing the slow paths, which initialize them).
    #[inline]
    pub const fn new() -> Self {
        Self {
            prealloc_segment: MaybeUninit::uninit(),
            dynamic_segments: Vec::new(),
            len: 0,
            write_ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            segment_base: std::ptr::null_mut(),
            _marker: PhantomData,
        }
    }

    /// Recomputes the cached cursor (`segment_base` / `write_ptr` /
    /// `segment_end`) from `self.len`, pointing `write_ptr` at the slot
    /// where element `len` belongs (i.e. where the next push writes).
    #[inline]
    fn init_write_ptr(&mut self) {
        if PREALLOC > 0 && self.len < PREALLOC {
            // Cursor is inside the inline segment.
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_base = base;
            self.write_ptr = unsafe { base.add(self.len) };
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else if !self.dynamic_segments.is_empty() {
            // Cursor is inside a heap shelf.
            // NOTE(review): assumes the shelf holding index `len` is already
            // allocated (callers grow capacity first) — confirm at call sites.
            let (shelf, box_idx) = Self::location(self.len);
            let shelf_size = Self::shelf_size(shelf as u32);
            let base = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
            self.segment_base = base;
            self.write_ptr = unsafe { base.add(box_idx) };
            self.segment_end = unsafe { base.add(shelf_size) };
        } else if PREALLOC > 0 {
            // Empty, inline storage only.
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_base = base;
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            // Empty and nothing allocated: null cursor forces the slow paths.
            self.segment_base = std::ptr::null_mut();
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    /// Number of elements in the vector.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector holds no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Total number of elements the vector can hold without allocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.dynamic_segments.len() as u32)
    }
172
    /// Appends an element. Amortized O(1); existing elements never move.
    #[inline]
    pub fn push(&mut self, value: T) {
        // Fast path: room left in the current segment.
        if self.write_ptr < self.segment_end {
            unsafe {
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.len += 1;
            return;
        }

        self.push_slow(value);
    }

    /// Out-of-line push path: begins the inline segment on the very first
    /// push, or allocates/enters the next heap shelf, then stores `value`
    /// at that segment's first slot.
    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        let new_len = self.len + 1;

        if PREALLOC > 0 && self.len == 0 {
            // First push with inline storage available.
            unsafe {
                let base = (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr();
                std::ptr::write(base, value);
                self.segment_base = base;
                self.write_ptr = base.add(1);
                self.segment_end = base.add(PREALLOC);
            }
            self.len = 1;
            return;
        }

        // This path is only reached at a segment boundary, so `len + BIAS`
        // is a power of two and its trailing zeros identify the shelf we
        // are about to enter.
        let biased = self.len + Self::BIAS;
        let shelf = (biased.trailing_zeros() - Self::SHELF_OFFSET) as usize;
        let shelf_size = Self::shelf_size(shelf as u32);

        self.grow_capacity(new_len);

        // SAFETY: grow_capacity just ensured shelf `shelf` exists.
        unsafe {
            let base = self.dynamic_segments.get_unchecked(shelf).as_ptr();
            std::ptr::write(base, value);
            self.segment_base = base;
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len = new_len;
    }

    /// Removes and returns the last element, or `None` if the vector is empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        // Fast path: the last element lives inside the current segment.
        if self.write_ptr > self.segment_base {
            unsafe {
                let new_len = self.len - 1;
                self.len = new_len;
                self.write_ptr = self.write_ptr.sub(1);
                Some(std::ptr::read(self.write_ptr))
            }
        } else {
            self.pop_slow_path()
        }
    }
270
271 #[cold]
272 #[inline(never)]
273 fn pop_slow_path(&mut self) -> Option<T> {
274 if self.len == 0 {
275 return None;
276 }
277
278 unsafe {
279 let val = std::ptr::read(self.write_ptr);
282
283 let new_len = self.len - 1;
284 self.len = new_len;
285 self.init_write_ptr();
286
287 Some(val)
288 }
289 }
290
    /// Returns a reference to the element at `index`, or `None` if out of bounds.
    #[inline]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len {
            // SAFETY: bounds just checked.
            Some(unsafe { self.unchecked_at(index) })
        } else {
            None
        }
    }

    /// Returns a mutable reference to the element at `index`, or `None` if
    /// out of bounds.
    #[inline]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len {
            // SAFETY: bounds just checked.
            Some(unsafe { self.unchecked_at_mut(index) })
        } else {
            None
        }
    }

    /// Returns the first element, or `None` if empty.
    #[inline]
    pub fn first(&self) -> Option<&T> {
        self.get(0)
    }

    /// Returns a mutable reference to the first element, or `None` if empty.
    #[inline]
    pub fn first_mut(&mut self) -> Option<&mut T> {
        self.get_mut(0)
    }

    /// Returns the last element, or `None` if empty.
    #[inline]
    pub fn last(&self) -> Option<&T> {
        if self.len == 0 {
            None
        } else {
            self.get(self.len - 1)
        }
    }

    /// Returns a mutable reference to the last element, or `None` if empty.
    #[inline]
    pub fn last_mut(&mut self) -> Option<&mut T> {
        if self.len == 0 {
            None
        } else {
            self.get_mut(self.len - 1)
        }
    }

    /// Returns `true` if the vector contains an element equal to `x`.
    #[inline]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(x)
    }

    /// Returns `true` if `needle` is a prefix of the vector.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    /// Returns `true` if `needle` is a suffix of the vector.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }

    /// Binary-searches a sorted vector for `x`; `Ok(idx)` on a match,
    /// `Err(idx)` with the insertion point otherwise.
    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
    where
        T: Ord,
    {
        self.as_slice().binary_search(x)
    }

    /// Binary search with a caller-supplied comparator.
    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> Ordering,
    {
        self.as_slice().binary_search_by(f)
    }

    /// Binary search by a key extracted from each element.
    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> B,
        B: Ord,
    {
        self.as_slice().binary_search_by_key(b, f)
    }

    /// Swaps the elements at indices `a` and `b` (panics if out of bounds).
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    /// Reverses the order of the elements in place.
    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    /// Overwrites every element with a clone of `value`.
    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    /// Overwrites every element with values produced by `f`.
    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }
430
431 pub fn clear(&mut self) {
435 if self.len == 0 {
436 return;
437 }
438
439 if std::mem::needs_drop::<T>() {
440 if PREALLOC > 0 {
442 let count = self.len.min(PREALLOC);
443 let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
444 unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
445 }
446
447 if self.len > PREALLOC {
449 let mut remaining = self.len - PREALLOC;
450 for shelf in 0..self.dynamic_segments.len() {
451 let size = Self::shelf_size(shelf as u32);
452 let count = remaining.min(size);
453 let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
454 unsafe {
455 std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
456 };
457 remaining -= count;
458 if remaining == 0 {
459 break;
460 }
461 }
462 }
463 }
464
465 self.len = 0;
466 if PREALLOC > 0 {
468 let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
469 self.write_ptr = base;
470 self.segment_end = unsafe { base.add(PREALLOC) };
471 } else {
472 self.write_ptr = std::ptr::null_mut();
473 self.segment_end = std::ptr::null_mut();
474 }
475 }
476
477 pub fn truncate(&mut self, new_len: usize) {
481 if new_len >= self.len {
482 return;
483 }
484
485 if std::mem::needs_drop::<T>() {
486 if PREALLOC > 0 && new_len < PREALLOC {
488 let start = new_len;
489 let end = self.len.min(PREALLOC);
490 if start < end {
491 let ptr = unsafe {
492 (*self.prealloc_segment.as_mut_ptr())
493 .as_mut_ptr()
494 .add(start)
495 };
496 unsafe {
497 std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(
498 ptr,
499 end - start,
500 ))
501 };
502 }
503 }
504
505 if self.len > PREALLOC {
507 let dynamic_new_len = new_len.saturating_sub(PREALLOC);
508 let dynamic_old_len = self.len - PREALLOC;
509
510 if dynamic_new_len < dynamic_old_len {
511 let mut pos = 0usize;
512 for shelf in 0..self.dynamic_segments.len() {
513 let size = Self::shelf_size(shelf as u32);
514 let seg_end = pos + size;
515
516 let drop_start = dynamic_new_len.max(pos);
518 let drop_end = dynamic_old_len.min(seg_end);
519
520 if drop_start < drop_end {
521 let offset = drop_start - pos;
522 let count = drop_end - drop_start;
523 let ptr = unsafe {
524 self.dynamic_segments
525 .get_unchecked(shelf)
526 .as_ptr()
527 .add(offset)
528 };
529 unsafe {
530 std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(
531 ptr, count,
532 ))
533 };
534 }
535
536 pos = seg_end;
537 if pos >= dynamic_old_len {
538 break;
539 }
540 }
541 }
542 }
543 }
544
545 self.len = new_len;
546 if new_len > 0 {
548 self.init_write_ptr();
549 } else if PREALLOC > 0 {
550 let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
551 self.write_ptr = base;
552 self.segment_end = unsafe { base.add(PREALLOC) };
553 } else {
554 self.write_ptr = std::ptr::null_mut();
555 self.segment_end = std::ptr::null_mut();
556 }
557 }
558
    /// Ensures capacity for at least `additional` more elements. Already-held
    /// shelves are never reallocated; only new shelves are added.
    pub fn reserve(&mut self, additional: usize) {
        let old_capacity = self.capacity();
        self.grow_capacity(self.len + additional);
        // If this was the very first allocation the cursor is still null;
        // point it at the freshly allocated storage.
        if old_capacity == 0 && self.capacity() > 0 && self.segment_end.is_null() {
            self.init_write_ptr();
        }
    }

    /// Releases every heap shelf not needed for the current length.
    pub fn shrink_to_fit(&mut self) {
        self.shrink_capacity(self.len);
    }

    /// Returns a segment-aware iterator over `&T`. The null cursor makes the
    /// first `next` call fall into the segment-setup path.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T, PREALLOC> {
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Returns a segment-aware iterator over `&mut T`.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T, PREALLOC> {
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Borrows the whole vector as a segmented slice view.
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T, PREALLOC> {
        SegmentedSlice::new(self)
    }

    /// Mutably borrows the whole vector as a segmented slice view.
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T, PREALLOC> {
        SegmentedSliceMut::new(self)
    }

    /// Borrows the sub-range `range` as a segmented slice view.
    /// Panics if the range is invalid or out of bounds.
    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    /// Mutably borrows the sub-range `range` as a segmented slice view.
    /// Panics if the range is invalid or out of bounds.
    #[inline]
    pub fn slice_mut(
        &mut self,
        range: std::ops::Range<usize>,
    ) -> SegmentedSliceMut<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }
690
    /// Appends clones of all elements in `other`, segment by segment.
    pub fn extend_from_slice(&mut self, other: &[T])
    where
        T: Clone,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        // Fill whatever remains of the inline segment first.
        if PREALLOC > 0 && self.len < PREALLOC {
            let prealloc_remaining = PREALLOC - self.len;
            let to_copy = src.len().min(prealloc_remaining);
            let dest = unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len)
            };
            for (i, item) in src.iter().take(to_copy).enumerate() {
                // SAFETY: dest slots are within the reserved inline segment.
                unsafe { std::ptr::write(dest.add(i), item.clone()) };
            }
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Then fill heap shelves, one contiguous run at a time.
        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            let dest = unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            };
            for (i, item) in src.iter().take(to_copy).enumerate() {
                // SAFETY: reserve() above guaranteed these slots exist.
                unsafe { std::ptr::write(dest.add(i), item.clone()) };
            }
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Refresh the cached cursor; when exactly full, park it on null so
        // the next push takes the slow (allocating) path.
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    /// Appends all elements of `other` by `memcpy`; the `T: Copy` bound makes
    /// the bitwise copy sound (no drops, no double-ownership hazard).
    pub fn extend_from_copy_slice(&mut self, other: &[T])
    where
        T: Copy,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        // Fill whatever remains of the inline segment first.
        if PREALLOC > 0 && self.len < PREALLOC {
            let prealloc_remaining = PREALLOC - self.len;
            let to_copy = src.len().min(prealloc_remaining);
            // SAFETY: source and destination are distinct allocations.
            unsafe {
                let dest = (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Then fill heap shelves, one contiguous run at a time.
        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            // SAFETY: reserve() above guaranteed these slots exist.
            unsafe {
                let dest = self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Refresh the cached cursor; when exactly full, park it on null so
        // the next push takes the slow (allocating) path.
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
798
    /// Stable sort (delegates to the segmented-slice implementation).
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    /// Stable sort with a caller-supplied comparator.
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    /// Stable sort by a key extracted from each element.
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }

    /// Unstable (typically faster, non-allocating) sort.
    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    /// Unstable sort with a caller-supplied comparator.
    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    /// Unstable sort by a key extracted from each element.
    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }

    /// Returns `true` if the elements are in non-decreasing order.
    pub fn is_sorted(&self) -> bool
    where
        T: PartialOrd,
    {
        self.as_slice().is_sorted()
    }

    /// Returns `true` if consecutive elements satisfy `compare`.
    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: FnMut(&T, &T) -> bool,
    {
        self.as_slice().is_sorted_by(compare)
    }

    /// Returns `true` if the extracted keys are in non-decreasing order.
    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        self.as_slice().is_sorted_by_key(f)
    }

    /// Returns the index of the partition point of a partitioned vector
    /// (the first element for which `pred` is false).
    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        self.as_slice().partition_point(pred)
    }

    /// Rotates the vector left by `mid` positions in place.
    pub fn rotate_left(&mut self, mid: usize) {
        self.as_mut_slice().rotate_left(mid);
    }

    /// Rotates the vector right by `k` positions in place.
    pub fn rotate_right(&mut self, k: usize) {
        self.as_mut_slice().rotate_right(k);
    }

    /// Creates an empty vector with room for at least `capacity` elements.
    pub fn with_capacity(capacity: usize) -> Self {
        let mut vec = Self::new();
        vec.reserve(capacity);
        vec
    }
994
    /// Inserts `element` at `index`, shifting later elements right.
    /// O(n): pushes at the end, then bubbles the element into place by
    /// pairwise swaps (elements never move between allocations wholesale).
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len);
        self.push(element);
        if index < self.len - 1 {
            for i in (index..self.len - 1).rev() {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
            }
        }
    }

    /// Removes and returns the element at `index`, shifting later elements
    /// left (O(n) via pairwise swaps, then a pop). Panics if out of bounds.
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        // Bubble the doomed element to the end, preserving order of the rest.
        for i in index..self.len - 1 {
            unsafe {
                let ptr_a = self.unchecked_at_mut(i) as *mut T;
                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }

    /// Removes and returns the element at `index` in O(1) by swapping it
    /// with the last element (does not preserve order). Panics if out of
    /// bounds.
    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        let last_index = self.len - 1;
        if index != last_index {
            unsafe {
                let ptr_a = self.unchecked_at_mut(index) as *mut T;
                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }

    /// Keeps only the elements for which `f` returns `true`.
    /// NOTE(review): each removal is O(n), so this is O(n^2) worst case.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Like `retain`, but the predicate receives `&mut T`.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at_mut(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Removes consecutive duplicate elements.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }
1095
1096 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1098 where
1099 F: FnMut(&mut T, &mut T) -> bool,
1100 {
1101 if self.len <= 1 {
1102 return;
1103 }
1104 let mut write = 1;
1105 for read in 1..self.len {
1106 let should_keep = unsafe {
1107 let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1108 let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1109 !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1110 };
1111 if should_keep {
1112 if read != write {
1113 unsafe {
1114 let ptr_src = self.unchecked_at_mut(read) as *mut T;
1115 let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1116 std::ptr::swap(ptr_dst, ptr_src);
1117 }
1118 }
1119 write += 1;
1120 } else {
1121 unsafe {
1123 std::ptr::drop_in_place(self.unchecked_at_mut(read));
1124 }
1125 }
1126 }
1127 self.len = write;
1128 }
1129
    /// Removes consecutive elements that map to the same key.
    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }

    /// Resizes to `new_len`, filling new slots with clones of `value` or
    /// truncating as needed.
    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Resizes to `new_len`, filling new slots with values produced by `f`
    /// or truncating as needed.
    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Moves all elements of `other` to the end of `self`, leaving `other`
    /// empty. Popping yields `other` in reverse, so the appended range is
    /// reversed afterwards to restore the original order.
    pub fn append(&mut self, other: &mut Self) {
        let other_len = other.len;
        self.reserve(other_len);
        let start = self.len;
        while let Some(item) = other.pop() {
            self.push(item);
        }
        // Reverse the appended tail in place via a two-pointer swap.
        let mut left = start;
        let mut right = self.len;
        while left < right {
            right -= 1;
            if left < right {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
                left += 1;
            }
        }
    }
1199
1200 pub fn split_off(&mut self, at: usize) -> Self {
1209 assert!(at <= self.len);
1210 let mut other = Self::new();
1211 other.reserve(self.len - at);
1212 for i in at..self.len {
1213 other.push(unsafe { self.unchecked_read(i) });
1214 }
1215 self.len = at;
1216 other
1217 }
1218
    /// Iterator over non-overlapping chunks of `chunk_size` elements.
    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T, PREALLOC> {
        self.as_slice().chunks(chunk_size)
    }

    /// Iterator over overlapping windows of `size` elements.
    pub fn windows(&self, size: usize) -> Windows<'_, T, PREALLOC> {
        self.as_slice().windows(size)
    }

    /// Iterator over chunks starting from the end of the vector.
    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T, PREALLOC> {
        self.as_slice().rchunks(chunk_size)
    }

    /// Iterator over exactly-`chunk_size` chunks (remainder excluded).
    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T, PREALLOC> {
        self.as_slice().chunks_exact(chunk_size)
    }

    /// Returns a draining iterator over `range`. Panics if the range is
    /// invalid or out of bounds.
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            index: range.start,
        }
    }

    /// Copies the elements into a contiguous `Vec`.
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.as_slice().to_vec()
    }

    /// Number of heap shelves required to hold `box_count` elements in total
    /// (including any inline-stored prefix).
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        if PREALLOC == 0 {
            let val = box_count + Self::MIN_NON_ZERO_CAP;
            val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
        } else {
            let val = box_count + PREALLOC;
            val.next_power_of_two().trailing_zeros() - Self::PREALLOC_EXP - 1
        }
    }

    /// Number of element slots in heap shelf `shelf_index` (doubles per shelf).
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        if PREALLOC == 0 {
            Self::MIN_NON_ZERO_CAP << shelf_index
        } else {
            1usize << (shelf_index + Self::PREALLOC_EXP + 1)
        }
    }

    /// Maps a logical index (>= PREALLOC) to `(shelf, offset-within-shelf)`.
    /// Adding BIAS makes the index's MSB select the shelf and the remaining
    /// bits the offset.
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }

    /// Like `location`, but also returns how many slots remain in that shelf
    /// from the offset to the shelf's end (used for bulk copies).
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }
1331
    /// Returns a reference to the element at `index` without bounds checking.
    ///
    /// # Safety
    /// `index` must be < `self.len` (so its segment exists and the slot is
    /// initialized).
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            if index < PREALLOC {
                &(*self.prealloc_segment.as_ptr())[index]
            } else {
                let (shelf, box_idx) = Self::location(index);
                &*self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Returns a mutable reference to the element at `index` without bounds
    /// checking.
    ///
    /// # Safety
    /// `index` must be < `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            if index < PREALLOC {
                &mut (*self.prealloc_segment.as_mut_ptr())[index]
            } else {
                let (shelf, box_idx) = Self::location(index);
                &mut *self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Reads the element at `index` out by value (bitwise), without bounds
    /// checking.
    ///
    /// # Safety
    /// `index` must be < `self.len`, and the caller takes ownership of the
    /// value — the slot must not be read/dropped again afterwards.
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            if index < PREALLOC {
                std::ptr::read(&(*self.prealloc_segment.as_ptr())[index])
            } else {
                let (shelf, box_idx) = Self::location(index);
                std::ptr::read(
                    self.dynamic_segments
                        .get_unchecked(shelf)
                        .as_ptr()
                        .add(box_idx),
                )
            }
        }
    }
1395
1396 fn grow_capacity(&mut self, new_capacity: usize) {
1398 let new_shelf_count = Self::shelf_count(new_capacity);
1399 let old_shelf_count = self.dynamic_segments.len() as u32;
1400
1401 if new_shelf_count > old_shelf_count {
1402 self.dynamic_segments
1403 .reserve((new_shelf_count - old_shelf_count) as usize);
1404
1405 for i in old_shelf_count..new_shelf_count {
1406 let size = Self::shelf_size(i);
1407 let layout = Layout::array::<T>(size).expect("Layout overflow");
1408 let ptr = if layout.size() == 0 {
1409 NonNull::dangling()
1410 } else {
1411 let ptr = unsafe { alloc::alloc(layout) };
1412 NonNull::new(ptr as *mut T).expect("Allocation failed")
1413 };
1414 self.dynamic_segments.push(ptr);
1415 }
1416 }
1417 }
1418
    /// Total capacity provided by the inline segment plus `shelf_count`
    /// heap shelves (shelf sizes form a geometric series).
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        if shelf_count == 0 {
            PREALLOC
        } else if PREALLOC == 0 {
            (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
        } else {
            (1usize << (Self::PREALLOC_EXP + 1 + shelf_count)) - PREALLOC
        }
    }

    /// Frees heap shelves not needed for `new_capacity` elements.
    /// Does not touch elements — callers must have truncated first.
    fn shrink_capacity(&mut self, new_capacity: usize) {
        if new_capacity <= PREALLOC {
            // Everything fits inline: release all heap shelves.
            self.free_shelves(self.dynamic_segments.len() as u32, 0);
            self.dynamic_segments.clear();
            self.dynamic_segments.shrink_to_fit();
            return;
        }

        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.dynamic_segments.len() as u32;

        if new_shelf_count < old_shelf_count {
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.dynamic_segments.truncate(new_shelf_count as usize);
            self.dynamic_segments.shrink_to_fit();
        }
    }

    /// Deallocates shelves `[to_count, from_count)` (highest first).
    /// Pointers are left dangling in `dynamic_segments`; callers truncate.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            if layout.size() > 0 {
                // SAFETY: shelf i was allocated with exactly this layout.
                unsafe {
                    alloc::dealloc(
                        self.dynamic_segments[i as usize].as_ptr() as *mut u8,
                        layout,
                    );
                }
            }
        }
    }
1466}
1467
impl<T, const PREALLOC: usize> Drop for SegmentedVec<T, PREALLOC> {
    fn drop(&mut self) {
        // Drop the elements first, then release every heap shelf.
        self.clear();
        self.free_shelves(self.dynamic_segments.len() as u32, 0);
    }
}
1476
// Adapter used by the shared sorting routines: random access by index,
// without materializing a contiguous slice.
impl<T, const PREALLOC: usize> sort::IndexedAccess<T> for SegmentedVec<T, PREALLOC> {
    /// Reference to the element at `index` (debug-asserted in bounds).
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        unsafe { self.unchecked_at(index) }
    }

    /// Raw const pointer to the element at `index`.
    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe { (*self.prealloc_segment.as_ptr()).as_ptr().add(index) }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Raw mutable pointer to the element at `index`.
    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(index)
            }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Swaps the elements at `a` and `b` (no-op when `a == b`, which also
    /// avoids an aliasing `ptr::swap` on the same slot).
    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1533
impl<T, const PREALLOC: usize> Default for SegmentedVec<T, PREALLOC> {
    /// Equivalent to [`SegmentedVec::new`].
    fn default() -> Self {
        Self::new()
    }
}

impl<T, const PREALLOC: usize> Index<usize> for SegmentedVec<T, PREALLOC> {
    type Output = T;

    /// Panics with "index out of bounds" when `index >= len`.
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}

impl<T, const PREALLOC: usize> IndexMut<usize> for SegmentedVec<T, PREALLOC> {
    /// Panics with "index out of bounds" when `index >= len`.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}
1553
impl<T: Clone, const PREALLOC: usize> Clone for SegmentedVec<T, PREALLOC> {
    /// Deep-clones segment by segment. `reserve` pre-allocates matching
    /// shelves in the clone, so source and destination shelf layouts line up
    /// one-to-one.
    fn clone(&self) -> Self {
        if self.len == 0 {
            return Self::new();
        }

        let mut new_vec = Self::new();
        new_vec.reserve(self.len);

        // Clone the inline-segment prefix.
        if PREALLOC > 0 {
            let count = self.len.min(PREALLOC);
            let src = unsafe { (*self.prealloc_segment.as_ptr()).as_ptr() };
            let dst = unsafe { (*new_vec.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            for i in 0..count {
                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
            }
            new_vec.len = count;
        }

        // Clone the initialized prefix of each heap shelf.
        if self.len > PREALLOC {
            let mut remaining = self.len - PREALLOC;
            for shelf in 0..self.dynamic_segments.len() {
                let size = Self::shelf_size(shelf as u32);
                let count = remaining.min(size);
                let src = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                let dst = unsafe { new_vec.dynamic_segments.get_unchecked(shelf).as_ptr() };
                for i in 0..count {
                    unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
                }
                new_vec.len += count;
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        // Establish the clone's cursor; park it on null when exactly full so
        // the next push takes the slow (allocating) path.
        if new_vec.len < new_vec.capacity() {
            new_vec.init_write_ptr();
        } else {
            new_vec.write_ptr = std::ptr::null_mut();
            new_vec.segment_end = std::ptr::null_mut();
        }

        new_vec
    }
}
1604
impl<T: std::fmt::Debug, const PREALLOC: usize> std::fmt::Debug for SegmentedVec<T, PREALLOC> {
    /// Formats like a slice: `[a, b, c]`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
1610
1611impl<T: PartialEq, const PREALLOC: usize> PartialEq for SegmentedVec<T, PREALLOC> {
1612 fn eq(&self, other: &Self) -> bool {
1613 if self.len != other.len {
1614 return false;
1615 }
1616 for i in 0..self.len {
1617 if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1618 return false;
1619 }
1620 }
1621 true
1622 }
1623}
1624
// Full equivalence relation whenever `T` itself provides one.
impl<T: Eq, const PREALLOC: usize> Eq for SegmentedVec<T, PREALLOC> {}
1626
1627impl<T, const PREALLOC: usize> FromIterator<T> for SegmentedVec<T, PREALLOC> {
1628 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1629 let iter = iter.into_iter();
1630 let (lower, _) = iter.size_hint();
1631 let mut vec = Self::new();
1632 vec.reserve(lower);
1633 for item in iter {
1634 vec.push(item);
1635 }
1636 vec
1637 }
1638}
1639
impl<T, const PREALLOC: usize> Extend<T> for SegmentedVec<T, PREALLOC> {
    /// Appends every item; reserves the iterator's lower size-hint bound up
    /// front to reduce shelf allocations.
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        let iter = iter.into_iter();
        let (lower, _) = iter.size_hint();
        self.reserve(lower);
        for item in iter {
            self.push(item);
        }
    }
}
1650
/// Borrowing iterator over a [`SegmentedVec`], advancing segment by segment
/// so the hot path is a single pointer increment.
pub struct Iter<'a, T, const PREALLOC: usize> {
    // Source vector (also supplies `len` for size_hint).
    vec: &'a SegmentedVec<T, PREALLOC>,
    // Next element within the current segment (null before the first call).
    ptr: *const T,
    // One-past-the-end of the current segment.
    segment_end: *const T,
    // Logical index of the next element to yield.
    index: usize,
    // Next heap shelf to enter once the current segment is exhausted.
    shelf_index: u32,
}

impl<'a, T, const PREALLOC: usize> Iterator for Iter<'a, T, PREALLOC> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment.
        if self.ptr < self.segment_end {
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}

impl<'a, T, const PREALLOC: usize> Iter<'a, T, PREALLOC> {
    /// Advances into the next segment (inline or heap shelf) and yields its
    /// first element, or returns `None` when the vector is exhausted.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        if self.index < PREALLOC {
            // Enter (or re-enter) the inline segment.
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_ptr())
                    .as_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // Enter the next heap shelf; cap at the vector's live length.
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}

// size_hint is exact, so ExactSizeIterator is sound.
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Iter<'a, T, PREALLOC> {}
1725
/// Mutable iterator over a `SegmentedVec`.
///
/// Same segment-hopping strategy as [`Iter`], but holds the vector
/// exclusively and yields `&mut T`.
pub struct IterMut<'a, T, const PREALLOC: usize> {
    /// The vector being iterated; exclusively borrowed for the lifetime.
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    /// Pointer to the next element to yield within the current segment.
    ptr: *mut T,
    /// One past the last live element of the current segment.
    segment_end: *mut T,
    /// Global index (into the whole vec) of the next element to yield.
    index: usize,
    /// Index into `dynamic_segments` of the next shelf to enter.
    shelf_index: u32,
}
1738
1739impl<'a, T, const PREALLOC: usize> Iterator for IterMut<'a, T, PREALLOC> {
1740 type Item = &'a mut T;
1741
1742 #[inline]
1743 fn next(&mut self) -> Option<Self::Item> {
1744 if self.ptr < self.segment_end {
1745 let result = self.ptr;
1746 self.ptr = unsafe { self.ptr.add(1) };
1747 self.index += 1;
1748 return Some(unsafe { &mut *result });
1749 }
1750 self.next_segment()
1751 }
1752
1753 #[inline]
1754 fn size_hint(&self) -> (usize, Option<usize>) {
1755 let remaining = self.vec.len.saturating_sub(self.index);
1756 (remaining, Some(remaining))
1757 }
1758}
1759
impl<'a, T, const PREALLOC: usize> IterMut<'a, T, PREALLOC> {
    /// Cold path of `next()`: position the cursor at the segment that
    /// contains `self.index` and yield the element there, or return
    /// `None` when every element has been yielded.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        if self.index < PREALLOC {
            // Still inside the inline preallocated segment.
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            // Clamp to `len` in case the vec ends inside the prealloc block.
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // Dynamic shelf: its capacity comes from the shelf index; the
            // live portion is clamped to the vec's remaining length.
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the freshly selected segment.
        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}
1797
// `size_hint` returns an exact count, so `ExactSizeIterator` is sound.
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for IterMut<'a, T, PREALLOC> {}
1799
1800impl<T, const PREALLOC: usize> IntoIterator for SegmentedVec<T, PREALLOC> {
1801 type Item = T;
1802 type IntoIter = IntoIter<T, PREALLOC>;
1803
1804 fn into_iter(self) -> Self::IntoIter {
1805 IntoIter {
1806 vec: self,
1807 index: 0,
1808 }
1809 }
1810}
1811
1812impl<'a, T, const PREALLOC: usize> IntoIterator for &'a SegmentedVec<T, PREALLOC> {
1813 type Item = &'a T;
1814 type IntoIter = Iter<'a, T, PREALLOC>;
1815
1816 fn into_iter(self) -> Self::IntoIter {
1817 self.iter()
1818 }
1819}
1820
1821impl<'a, T, const PREALLOC: usize> IntoIterator for &'a mut SegmentedVec<T, PREALLOC> {
1822 type Item = &'a mut T;
1823 type IntoIter = IterMut<'a, T, PREALLOC>;
1824
1825 fn into_iter(self) -> Self::IntoIter {
1826 self.iter_mut()
1827 }
1828}
1829
/// Owning iterator for `SegmentedVec`; moves elements out one at a time.
pub struct IntoIter<T, const PREALLOC: usize> {
    /// The vector whose elements are being moved out.
    vec: SegmentedVec<T, PREALLOC>,
    /// Index of the next element to read out of `vec`.
    index: usize,
}
1835
1836impl<T, const PREALLOC: usize> Iterator for IntoIter<T, PREALLOC> {
1837 type Item = T;
1838
1839 fn next(&mut self) -> Option<Self::Item> {
1840 if self.index >= self.vec.len {
1841 return None;
1842 }
1843 let value = unsafe { self.vec.unchecked_read(self.index) };
1844 self.index += 1;
1845 Some(value)
1846 }
1847
1848 fn size_hint(&self) -> (usize, Option<usize>) {
1849 let remaining = self.vec.len - self.index;
1850 (remaining, Some(remaining))
1851 }
1852}
1853
// `size_hint` returns an exact count, so `ExactSizeIterator` is sound.
impl<T, const PREALLOC: usize> ExactSizeIterator for IntoIter<T, PREALLOC> {}
1855
1856impl<T, const PREALLOC: usize> Drop for IntoIter<T, PREALLOC> {
1857 fn drop(&mut self) {
1858 for i in self.index..self.vec.len {
1860 unsafe {
1861 std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1862 }
1863 }
1864 self.vec.len = 0;
1866 }
1867}
1868
/// Draining iterator over the sub-range `range_start..range_end` of a
/// `SegmentedVec`. Yields elements by value; on drop, the tail is shifted
/// down to close the gap and the vector's length is reduced.
pub struct Drain<'a, T, const PREALLOC: usize> {
    /// The vector being drained (exclusively borrowed).
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    /// First index of the drained range (fixed for the drain's lifetime).
    range_start: usize,
    /// One past the last element still to yield; moved left by next_back().
    range_end: usize,
    /// Next front index to yield; moved right by next().
    index: usize,
}
1878
1879impl<'a, T, const PREALLOC: usize> Iterator for Drain<'a, T, PREALLOC> {
1880 type Item = T;
1881
1882 fn next(&mut self) -> Option<Self::Item> {
1883 if self.index >= self.range_end {
1884 None
1885 } else {
1886 let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1887 self.index += 1;
1888 Some(value)
1889 }
1890 }
1891
1892 fn size_hint(&self) -> (usize, Option<usize>) {
1893 let remaining = self.range_end - self.index;
1894 (remaining, Some(remaining))
1895 }
1896}
1897
1898impl<'a, T, const PREALLOC: usize> DoubleEndedIterator for Drain<'a, T, PREALLOC> {
1899 fn next_back(&mut self) -> Option<Self::Item> {
1900 if self.index >= self.range_end {
1901 None
1902 } else {
1903 self.range_end -= 1;
1904 Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1905 }
1906 }
1907}
1908
// `size_hint` returns an exact count, so `ExactSizeIterator` is sound.
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Drain<'a, T, PREALLOC> {}
1910
1911impl<'a, T, const PREALLOC: usize> Drop for Drain<'a, T, PREALLOC> {
1912 fn drop(&mut self) {
1913 for i in self.index..self.range_end {
1915 unsafe {
1916 std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1917 }
1918 }
1919
1920 let original_range_end = self.range_end;
1922 let original_len = self.vec.len;
1923 let drain_count = original_range_end - self.range_start;
1924
1925 for i in 0..(original_len - original_range_end) {
1926 unsafe {
1927 let src = self.vec.unchecked_at(original_range_end + i) as *const T;
1928 let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
1929 std::ptr::copy_nonoverlapping(src, dst, 1);
1930 }
1931 }
1932
1933 self.vec.len = original_len - drain_count;
1934 }
1935}
1936
// Unit tests: core push/pop/index behavior, iteration, sorting, slicing,
// pointer stability across growth, and Drop accounting.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    #[test]
    fn test_stable_pointers() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        // Growth must never move existing elements.
        for i in 2..1000 {
            vec.push(i);
        }

        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32, 4> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        assert_eq!(vec.pop(), Some(99));
        assert_eq!(vec.len(), 99);
    }

    #[test]
    fn test_various_prealloc_sizes() {
        fn test_prealloc<const N: usize>() {
            let mut vec: SegmentedVec<i32, N> = SegmentedVec::new();
            for i in 0..100 {
                vec.push(i);
            }
            for i in 0..100 {
                assert_eq!(vec[i], i as i32);
            }
        }

        test_prealloc::<0>();
        test_prealloc::<1>();
        test_prealloc::<2>();
        test_prealloc::<4>();
        test_prealloc::<8>();
        test_prealloc::<16>();
    }

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter, 4> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        // Descending comparator.
        vec.sort_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    #[test]
    fn test_sort_stable() {
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item, 4> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        // Stable sort preserves insertion order within equal keys.
        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        // Descending comparator.
        vec.sort_unstable_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        // Unstable sort: only the key ordering is guaranteed, not the
        // relative order of elements with equal keys.
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_with_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        let ptr = &vec[0] as *const i32;

        vec.sort();

        // Sorting moves values between slots, not the slots themselves:
        // slot 0 now holds the smallest value.
        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        // Only the middle window is sorted; the rest is untouched.
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_min_non_zero_cap() {
        // First allocation sizes follow MIN_NON_ZERO_CAP: 8 for 1-byte
        // elements, 4 for elements up to 1024 bytes.
        let mut vec_u8: SegmentedVec<u8, 0> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        let mut vec_i32: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        // Fill part of the prealloc segment, then keep pushing across the
        // prealloc/dynamic boundary.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]);
        assert_eq!(vec.len(), 3);
        vec.push(4);
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }

        // Fill the prealloc segment exactly, then push one more.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3, 4]);
        assert_eq!(vec.len(), 4);
        vec.push(5);
        assert_eq!(vec.len(), 5);
        assert_eq!(vec[4], 5);
    }
}