1mod slice;
28mod sort;
29
30pub use slice::{
31 Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32 Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::ops::{Index, IndexMut};
39
/// Hard upper bound on the number of segments ("shelves"). Shelf sizes double,
/// so 64 shelves exceed any realistic address space.
const MAX_SEGMENTS: usize = 64;
43
#[repr(C)]
pub struct SegmentedVec<T> {
    // Next free slot in the active segment; null when the vec is empty/cleared.
    write_ptr: *mut T,
    // One-past-the-end of the active segment; null when the vec is empty.
    segment_end: *mut T,
    // Number of live elements.
    len: usize,
    // Index of the segment `write_ptr` points into; `usize::MAX` sentinel when
    // empty (push's wrapping_add(1) turns it into 0).
    active_segment_index: usize,
    // How many leading entries of `dynamic_segments` are allocated.
    segment_count: usize,
    // Shelf `i` holds `MIN_NON_ZERO_CAP << i` elements; unallocated slots are null.
    dynamic_segments: [*mut T; MAX_SEGMENTS],
    _marker: PhantomData<T>,
}
58
59impl<T> SegmentedVec<T> {
    /// Capacity of the first (smallest) shelf, chosen by element size: tiny
    /// elements get a larger first shelf, very large ones a single slot.
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    /// log2 of `MIN_NON_ZERO_CAP`; all shelf sizes are powers of two, so this
    /// is the exponent bias used by the index→(shelf, offset) math.
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();
77
78 #[inline]
88 pub const fn new() -> Self {
89 Self {
90 dynamic_segments: [std::ptr::null_mut(); MAX_SEGMENTS],
91 segment_count: 0,
92 len: 0,
93 write_ptr: std::ptr::null_mut(),
94 segment_end: std::ptr::null_mut(),
95 active_segment_index: usize::MAX,
97 _marker: PhantomData,
98 }
99 }
100
    /// Number of elements in the vector.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// `true` when the vector holds no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Total element capacity across all allocated segments.
    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.segment_count as u32)
    }
118
    /// Appends `value` to the back of the vector.
    ///
    /// Existing elements are never moved, so references handed out earlier
    /// remain valid (segments are stable).
    #[inline]
    pub fn push(&mut self, value: T) {
        if self.write_ptr < self.segment_end {
            // Fast path: the active segment still has room.
            unsafe {
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.len += 1;
            return;
        }

        self.push_slow(value);
    }

    /// Slow path of `push`: advance to the next shelf (allocating it if it
    /// does not exist yet) and write `value` into its first slot.
    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        // The empty sentinel `usize::MAX` wraps to shelf 0 here.
        self.active_segment_index = self.active_segment_index.wrapping_add(1);

        if self.active_segment_index >= self.segment_count {
            self.grow_once();
        }

        let idx = self.active_segment_index;

        let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
        // Shelf `i` holds `MIN_NON_ZERO_CAP << i` elements.
        let shelf_size = Self::MIN_NON_ZERO_CAP << idx;

        unsafe {
            std::ptr::write(base, value);
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len += 1;
    }
173
    /// Removes and returns the last element, or `None` if the vector is empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }

        let segment_base = unsafe {
            *self
                .dynamic_segments
                .get_unchecked(self.active_segment_index)
        };
        if self.write_ptr > segment_base {
            // Fast path: the active segment still contains elements.
            self.len -= 1;
            unsafe {
                self.write_ptr = self.write_ptr.sub(1);
                Some(std::ptr::read(self.write_ptr))
            }
        } else {
            self.pop_slow_path()
        }
    }

    /// Slow path of `pop`: step back to the previous shelf and read its last
    /// slot. Caller guarantees `len > 0` and that the active shelf is empty
    /// (so a previous shelf with elements must exist).
    #[cold]
    #[inline(never)]
    fn pop_slow_path(&mut self) -> Option<T> {
        self.active_segment_index -= 1;
        let idx = self.active_segment_index;

        let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
        let capacity = Self::MIN_NON_ZERO_CAP << idx;

        self.segment_end = unsafe { base.add(capacity) };
        self.write_ptr = unsafe { self.segment_end.sub(1) };
        self.len -= 1;
        Some(unsafe { std::ptr::read(self.write_ptr) })
    }
230
    /// Returns a reference to the element at `index`, or `None` if out of bounds.
    #[inline]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len {
            Some(unsafe { self.unchecked_at(index) })
        } else {
            None
        }
    }

    /// Returns a mutable reference to the element at `index`, or `None` if
    /// out of bounds.
    #[inline]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len {
            Some(unsafe { self.unchecked_at_mut(index) })
        } else {
            None
        }
    }
254
    /// Returns a reference to the first element, or `None` if empty.
    #[inline]
    pub fn first(&self) -> Option<&T> {
        if self.len == 0 {
            None
        } else {
            // Index 0 always lives at the start of shelf 0.
            Some(unsafe { &*self.dynamic_segments[0] })
        }
    }

    /// Returns a mutable reference to the first element, or `None` if empty.
    #[inline]
    pub fn first_mut(&mut self) -> Option<&mut T> {
        if self.len == 0 {
            None
        } else {
            Some(unsafe { &mut *self.dynamic_segments[0] })
        }
    }

    /// Returns a reference to the last element, or `None` if empty.
    #[inline]
    pub fn last(&self) -> Option<&T> {
        if self.len == 0 {
            return None;
        }

        let segment_base = unsafe {
            *self
                .dynamic_segments
                .get_unchecked(self.active_segment_index)
        };

        if self.write_ptr > segment_base {
            // Fast path: last element sits just before the write cursor.
            Some(unsafe { &*self.write_ptr.sub(1) })
        } else {
            // Cursor is parked at a segment start; fall back to index math.
            self.get(self.len - 1)
        }
    }

    /// Returns a mutable reference to the last element, or `None` if empty.
    #[inline]
    pub fn last_mut(&mut self) -> Option<&mut T> {
        if self.len == 0 {
            return None;
        }

        let segment_base = unsafe {
            *self
                .dynamic_segments
                .get_unchecked(self.active_segment_index)
        };

        if self.write_ptr > segment_base {
            Some(unsafe { &mut *self.write_ptr.sub(1) })
        } else {
            self.get_mut(self.len - 1)
        }
    }
324
    /// `true` if the vector contains an element equal to `x`.
    #[inline]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(x)
    }

    /// `true` if the vector begins with the elements of `needle`.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    /// `true` if the vector ends with the elements of `needle`.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }

    /// Binary-searches a sorted vector for `x`; same contract as
    /// `slice::binary_search`.
    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
    where
        T: Ord,
    {
        self.as_slice().binary_search(x)
    }

    /// Binary search with a caller-supplied comparator.
    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> Ordering,
    {
        self.as_slice().binary_search_by(f)
    }

    /// Binary search keyed by `f`; the vector must be sorted by that key.
    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> B,
        B: Ord,
    {
        self.as_slice().binary_search_by_key(b, f)
    }
377
    /// Swaps the elements at indices `a` and `b`.
    ///
    /// # Panics
    /// Panics if either index is out of bounds.
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    /// Reverses the element order in place.
    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    /// Overwrites every element with a clone of `value`.
    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    /// Overwrites every element with values produced by `f`.
    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }
408
    /// Removes all elements, dropping them; allocated segments are retained.
    pub fn clear(&mut self) {
        if self.len == 0 {
            return;
        }

        if std::mem::needs_drop::<T>() {
            // Drop the occupied prefix of each shelf.
            let mut remaining = self.len;
            for shelf in 0..self.segment_count {
                let size = Self::shelf_size(shelf as u32);
                let count = remaining.min(size);
                let ptr = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
                unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        self.len = 0;
        // Reset the cursor to the empty sentinel state.
        self.write_ptr = std::ptr::null_mut();
        self.segment_end = std::ptr::null_mut();
        self.active_segment_index = usize::MAX;
    }
438
    /// Shortens the vector to `new_len`, dropping the tail. No-op when
    /// `new_len >= len`; capacity is retained.
    pub fn truncate(&mut self, new_len: usize) {
        if new_len >= self.len {
            return;
        }

        if std::mem::needs_drop::<T>() {
            let dynamic_new_len = new_len;
            let dynamic_old_len = self.len;

            if dynamic_new_len < dynamic_old_len {
                // Walk the shelves and drop the portion of each shelf that
                // overlaps the logical range [new_len, old_len).
                let mut pos = 0usize;
                for shelf in 0..self.segment_count {
                    let size = Self::shelf_size(shelf as u32);
                    let seg_end = pos + size;

                    let drop_start = dynamic_new_len.max(pos);
                    let drop_end = dynamic_old_len.min(seg_end);

                    if drop_start < drop_end {
                        let offset = drop_start - pos;
                        let count = drop_end - drop_start;
                        let ptr =
                            unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(offset) };
                        unsafe {
                            std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
                        };
                    }

                    pos = seg_end;
                    if pos >= dynamic_old_len {
                        break;
                    }
                }
            }
        }

        self.len = new_len;
        if new_len > 0 {
            // Re-derive the write cursor: the MSB of `len + MIN_NON_ZERO_CAP`
            // selects the shelf, the low bits give the offset within it.
            let biased = new_len + Self::MIN_NON_ZERO_CAP;
            let msb = biased.ilog2();
            let idx = (msb - Self::MIN_CAP_EXP) as usize;
            self.active_segment_index = idx;

            let capacity = 1usize << msb;
            let offset = biased ^ capacity;
            unsafe {
                let base = *self.dynamic_segments.get_unchecked(idx);
                self.write_ptr = base.add(offset);
                self.segment_end = base.add(capacity);
            }
        } else {
            // Empty again: back to the sentinel cursor state.
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
            self.active_segment_index = usize::MAX;
        }
    }
502
503 pub fn reserve(&mut self, additional: usize) {
505 self.grow_capacity(self.len + additional);
506 if self.write_ptr.is_null() {
508 unsafe {
509 let base = *self.dynamic_segments.get_unchecked(0);
511 self.write_ptr = base;
512 self.segment_end = base.add(Self::MIN_NON_ZERO_CAP);
513
514 self.active_segment_index = 0;
519 }
520 }
521 }
522
523 pub fn shrink_to_fit(&mut self) {
527 self.shrink_capacity(self.len);
528 }
529
    /// Returns a borrowing iterator over the elements in index order.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T> {
        // Null start pointers force the first `next` onto the
        // segment-loading path.
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Returns a mutably borrowing iterator over the elements in index order.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }
555
    /// Borrows the whole vector as a segment-aware slice.
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T> {
        SegmentedSlice::new(self)
    }

    /// Mutably borrows the whole vector as a segment-aware slice.
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T> {
        SegmentedSliceMut::new(self)
    }

    /// Borrows the sub-range `range` as a segment-aware slice.
    ///
    /// # Panics
    /// Panics if the range is inverted or out of bounds.
    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    /// Mutably borrows the sub-range `range` as a segment-aware slice.
    ///
    /// # Panics
    /// Panics if the range is inverted or out of bounds.
    #[inline]
    pub fn slice_mut(&mut self, range: std::ops::Range<usize>) -> SegmentedSliceMut<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }
641
642 pub fn extend_from_slice(&mut self, other: &[T])
644 where
645 T: Clone,
646 {
647 if other.is_empty() {
648 return;
649 }
650 self.reserve(other.len());
651
652 let mut src = other;
653
654 while !src.is_empty() {
656 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
657 let to_copy = src.len().min(remaining);
658 let dest = unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) };
659 for (i, item) in src.iter().take(to_copy).enumerate() {
660 unsafe { std::ptr::write(dest.add(i), item.clone()) };
661 }
662 self.len += to_copy;
663 src = &src[to_copy..];
664 }
665
666 if self.len > 0 {
668 let biased = self.len + Self::MIN_NON_ZERO_CAP;
669 let msb = biased.ilog2();
670 let idx = (msb - Self::MIN_CAP_EXP) as usize;
671 self.active_segment_index = idx;
672
673 let capacity = 1usize << msb;
674 let offset = biased ^ capacity;
675 unsafe {
676 let base = *self.dynamic_segments.get_unchecked(idx);
677 self.write_ptr = base.add(offset);
678 self.segment_end = base.add(capacity);
679 }
680 }
681 }
682
683 pub fn extend_from_copy_slice(&mut self, other: &[T])
688 where
689 T: Copy,
690 {
691 if other.is_empty() {
692 return;
693 }
694 self.reserve(other.len());
695
696 let mut src = other;
697
698 while !src.is_empty() {
700 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
701 let to_copy = src.len().min(remaining);
702 unsafe {
703 let dest = (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx);
704 std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
705 };
706 self.len += to_copy;
707 src = &src[to_copy..];
708 }
709
710 if self.len > 0 {
712 let biased = self.len + Self::MIN_NON_ZERO_CAP;
713 let msb = biased.ilog2();
714 let idx = (msb - Self::MIN_CAP_EXP) as usize;
715 self.active_segment_index = idx;
716
717 let capacity = 1usize << msb;
718 let offset = biased ^ capacity;
719 unsafe {
720 let base = *self.dynamic_segments.get_unchecked(idx);
721 self.write_ptr = base.add(offset);
722 self.segment_end = base.add(capacity);
723 }
724 }
725 }
726
    /// Stable sort (ascending). Delegates to the segment-aware slice sort.
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    /// Stable sort with a caller-supplied comparator.
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    /// Stable sort by the key extracted with `f`.
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }

    /// Unstable (typically faster, non-allocating) sort.
    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    /// Unstable sort with a caller-supplied comparator.
    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    /// Unstable sort by the key extracted with `f`.
    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }

    /// `true` if the elements are in non-decreasing order.
    pub fn is_sorted(&self) -> bool
    where
        T: PartialOrd,
    {
        self.as_slice().is_sorted()
    }

    /// `true` if `compare` holds for every adjacent pair.
    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: FnMut(&T, &T) -> bool,
    {
        self.as_slice().is_sorted_by(compare)
    }

    /// `true` if the keys extracted with `f` are in non-decreasing order.
    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        self.as_slice().is_sorted_by_key(f)
    }

    /// Index of the partition point of a vector partitioned by `pred`;
    /// same contract as `slice::partition_point`.
    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        self.as_slice().partition_point(pred)
    }

    /// Rotates the elements left by `mid` places.
    pub fn rotate_left(&mut self, mid: usize) {
        self.as_mut_slice().rotate_left(mid);
    }

    /// Rotates the elements right by `k` places.
    pub fn rotate_right(&mut self, k: usize) {
        self.as_mut_slice().rotate_right(k);
    }
915
916 pub fn with_capacity(capacity: usize) -> Self {
918 let mut vec = Self::new();
919 vec.reserve(capacity);
920 vec
921 }
922
    /// Inserts `element` at `index`, shifting everything after it one place
    /// to the right. O(len - index) element swaps.
    ///
    /// # Panics
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len);
        // Append at the back, then bubble the new element down into place.
        self.push(element);
        if index < self.len - 1 {
            for i in (index..self.len - 1).rev() {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
            }
        }
    }

    /// Removes and returns the element at `index`, shifting the tail left.
    /// O(len - index) element swaps.
    ///
    /// # Panics
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        // Bubble the doomed element up to the back, then pop it.
        for i in index..self.len - 1 {
            unsafe {
                let ptr_a = self.unchecked_at_mut(i) as *mut T;
                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }

    /// Removes and returns the element at `index` in O(1) by swapping the
    /// last element into its place. Does not preserve order.
    ///
    /// # Panics
    /// Panics if `index >= len`.
    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        let last_index = self.len - 1;
        if index != last_index {
            unsafe {
                let ptr_a = self.unchecked_at_mut(index) as *mut T;
                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }
984
    /// Keeps only the elements for which `f` returns `true`, preserving order.
    ///
    /// NOTE(review): each removal calls `remove`, which shifts the whole tail,
    /// so this is O(removed * len) in the worst case.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Like [`Self::retain`], but the predicate receives a mutable reference.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at_mut(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }
1016
    /// Removes consecutive equal elements, keeping the first of each run.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }
1026
1027 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1029 where
1030 F: FnMut(&mut T, &mut T) -> bool,
1031 {
1032 if self.len <= 1 {
1033 return;
1034 }
1035 let mut write = 1;
1036 for read in 1..self.len {
1037 let should_keep = unsafe {
1038 let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1039 let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1040 !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1041 };
1042 if should_keep {
1043 if read != write {
1044 unsafe {
1045 let ptr_src = self.unchecked_at_mut(read) as *mut T;
1046 let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1047 std::ptr::swap(ptr_dst, ptr_src);
1048 }
1049 }
1050 write += 1;
1051 } else {
1052 unsafe {
1054 std::ptr::drop_in_place(self.unchecked_at_mut(read));
1055 }
1056 }
1057 }
1058 self.len = write;
1059 }
1060
    /// Removes consecutive elements whose keys (extracted with `key`) compare
    /// equal, keeping the first of each run.
    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }
1069
    /// Resizes to `new_len`: grows by pushing clones of `value`, or shrinks
    /// via [`Self::truncate`].
    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Resizes to `new_len`: grows by pushing values produced by `f`, or
    /// shrinks via [`Self::truncate`].
    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }
1106
    /// Moves all elements of `other` onto the end of `self`, leaving `other`
    /// empty and preserving their order.
    pub fn append(&mut self, other: &mut Self) {
        let other_len = other.len;
        self.reserve(other_len);
        let start = self.len;
        // Popping drains `other` back-to-front, so the moved block arrives
        // reversed...
        while let Some(item) = other.pop() {
            self.push(item);
        }
        // ...and is flipped back into its original order here.
        let mut left = start;
        let mut right = self.len;
        while left < right {
            right -= 1;
            if left < right {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
                left += 1;
            }
        }
    }
1130
1131 pub fn split_off(&mut self, at: usize) -> Self {
1140 assert!(at <= self.len);
1141 let mut other = Self::new();
1142 other.reserve(self.len - at);
1143 for i in at..self.len {
1144 other.push(unsafe { self.unchecked_read(i) });
1145 }
1146 self.len = at;
1147 other
1148 }
1149
    /// Iterator over non-overlapping chunks of `chunk_size` elements.
    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
        self.as_slice().chunks(chunk_size)
    }

    /// Iterator over all overlapping windows of `size` elements.
    pub fn windows(&self, size: usize) -> Windows<'_, T> {
        self.as_slice().windows(size)
    }

    /// Like [`Self::chunks`], but starting from the end of the vector.
    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
        self.as_slice().rchunks(chunk_size)
    }

    /// Iterator over exactly-sized chunks; a shorter remainder is skipped.
    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
        self.as_slice().chunks_exact(chunk_size)
    }

    /// Removes `range`, yielding the removed elements. The tail is shifted
    /// down when the `Drain` is dropped.
    ///
    /// # Panics
    /// Panics if the range is inverted or out of bounds.
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            index: range.start,
        }
    }

    /// Clones the elements into a contiguous standard `Vec`.
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.as_slice().to_vec()
    }
1208
    /// Number of shelves needed to store `box_count` elements.
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        // Biasing by the first-shelf size turns the doubling shelf geometry
        // into pure power-of-two arithmetic.
        let val = box_count + Self::MIN_NON_ZERO_CAP;
        val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
    }

    /// Element capacity of shelf `shelf_index` (doubles per shelf).
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        Self::MIN_NON_ZERO_CAP << shelf_index
    }

    /// Maps a logical index to `(shelf, offset-within-shelf)`: the MSB of
    /// `index + MIN_NON_ZERO_CAP` selects the shelf, the remaining low bits
    /// are the offset.
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        let biased = list_index + Self::MIN_NON_ZERO_CAP;
        let msb = biased.ilog2();
        let shelf = msb - Self::MIN_CAP_EXP;
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }

    /// Like [`Self::location`], but also returns how many slots remain in
    /// that shelf from the given index to the shelf's end.
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::MIN_NON_ZERO_CAP;
        let msb = biased.ilog2();
        let shelf = msb - Self::MIN_CAP_EXP;
        let box_idx = biased ^ (1usize << msb);
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }
1253
    /// Returns a reference to the element at `index` without bounds checking.
    ///
    /// # Safety
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            &*(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
        }
    }

    /// Mutable counterpart of [`Self::unchecked_at`].
    ///
    /// # Safety
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            &mut *(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
        }
    }

    /// Moves the element at `index` out by value (bitwise read).
    ///
    /// # Safety
    /// `index` must be in bounds, and the caller must ensure the slot is
    /// never read or dropped again afterwards.
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            std::ptr::read((*self.dynamic_segments.get_unchecked(shelf)).add(box_idx))
        }
    }
1292
    /// Allocates exactly one more shelf (the next size up).
    ///
    /// # Panics
    /// Panics on segment exhaustion, layout overflow, or allocation failure.
    fn grow_once(&mut self) {
        assert!(
            self.segment_count < MAX_SEGMENTS,
            "Maximum segment count exceeded"
        );

        let size = Self::shelf_size(self.segment_count as u32);
        let layout = Layout::array::<T>(size).expect("Layout overflow");
        let ptr = if layout.size() == 0 {
            // Zero-sized types never touch the allocator.
            std::ptr::dangling_mut::<T>()
        } else {
            let ptr = unsafe { alloc::alloc(layout) };
            if ptr.is_null() {
                panic!("Allocation failed");
            }
            ptr as *mut T
        };
        self.dynamic_segments[self.segment_count] = ptr;
        self.segment_count += 1;
    }

    /// Allocates shelves until total capacity reaches at least `new_capacity`.
    /// Never shrinks.
    fn grow_capacity(&mut self, new_capacity: usize) {
        let new_shelf_count = Self::shelf_count(new_capacity) as usize;
        let old_shelf_count = self.segment_count;

        if new_shelf_count > old_shelf_count {
            assert!(
                new_shelf_count <= MAX_SEGMENTS,
                "Maximum segment count exceeded"
            );

            for i in old_shelf_count..new_shelf_count {
                let size = Self::shelf_size(i as u32);
                let layout = Layout::array::<T>(size).expect("Layout overflow");
                let ptr = if layout.size() == 0 {
                    std::ptr::dangling_mut::<T>()
                } else {
                    let ptr = unsafe { alloc::alloc(layout) };
                    if ptr.is_null() {
                        panic!("Allocation failed");
                    }
                    ptr as *mut T
                };
                self.dynamic_segments[i] = ptr;
            }
            self.segment_count = new_shelf_count;
        }
    }
1342
    /// Total capacity of the first `shelf_count` shelves — a geometric series:
    /// `MIN_NON_ZERO_CAP * (2^shelf_count - 1)`.
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
    }

    /// Frees shelves beyond what `new_capacity` elements require.
    fn shrink_capacity(&mut self, new_capacity: usize) {
        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.segment_count as u32;

        if new_shelf_count < old_shelf_count {
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.segment_count = new_shelf_count as usize;
        }
    }

    /// Deallocates shelves `[to_count, from_count)`, largest first, and
    /// nulls their slots. Zero-sized layouts (ZSTs) were never allocated.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            if layout.size() > 0 {
                unsafe {
                    alloc::dealloc(self.dynamic_segments[i as usize] as *mut u8, layout);
                }
            }
            self.dynamic_segments[i as usize] = std::ptr::null_mut();
        }
    }
1373}
1374
impl<T> Drop for SegmentedVec<T> {
    fn drop(&mut self) {
        // Drop all elements, then release every allocated shelf.
        self.clear();
        self.free_shelves(self.segment_count as u32, 0);
    }
}
1383
// Random-access hooks used by the segment-aware sort implementation.
impl<T> sort::IndexedAccess<T> for SegmentedVec<T> {
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        unsafe { self.unchecked_at(index) }
    }

    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        let (shelf, box_idx) = Self::location(index);
        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
    }

    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        let (shelf, box_idx) = Self::location(index);
        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
    }

    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        // `ptr::swap` with identical pointers is invalid; skip a == b.
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1418
1419impl<T> Default for SegmentedVec<T> {
1420 fn default() -> Self {
1421 Self::new()
1422 }
1423}
1424
impl<T> Index<usize> for SegmentedVec<T> {
    type Output = T;

    /// # Panics
    /// Panics if `index >= len`.
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}

impl<T> IndexMut<usize> for SegmentedVec<T> {
    /// # Panics
    /// Panics if `index >= len`.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}
1438
1439impl<T: Clone> Clone for SegmentedVec<T> {
1440 fn clone(&self) -> Self {
1441 if self.len == 0 {
1442 return Self::new();
1443 }
1444
1445 let mut new_vec = Self::new();
1446 new_vec.reserve(self.len);
1447
1448 let mut remaining = self.len;
1449 for shelf in 0..self.segment_count {
1450 let size = Self::shelf_size(shelf as u32);
1451 let count = remaining.min(size);
1452 let src = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
1453 let dst = unsafe { *new_vec.dynamic_segments.get_unchecked(shelf) };
1454 for i in 0..count {
1455 unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1456 }
1457 new_vec.len += count;
1458 remaining -= count;
1459 if remaining == 0 {
1460 break;
1461 }
1462 }
1463
1464 if new_vec.len > 0 {
1466 let biased = new_vec.len + Self::MIN_NON_ZERO_CAP;
1467 let msb = biased.ilog2();
1468 let idx = (msb - Self::MIN_CAP_EXP) as usize;
1469 new_vec.active_segment_index = idx;
1470
1471 let capacity = 1usize << msb;
1472 let offset = biased ^ capacity;
1473 unsafe {
1474 let base = *new_vec.dynamic_segments.get_unchecked(idx);
1475 new_vec.write_ptr = base.add(offset);
1476 new_vec.segment_end = base.add(capacity);
1477 }
1478 }
1479
1480 new_vec
1481 }
1482}
1483
1484impl<T: std::fmt::Debug> std::fmt::Debug for SegmentedVec<T> {
1485 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1486 f.debug_list().entries(self.iter()).finish()
1487 }
1488}
1489
1490impl<T: PartialEq> PartialEq for SegmentedVec<T> {
1491 fn eq(&self, other: &Self) -> bool {
1492 if self.len != other.len {
1493 return false;
1494 }
1495 for i in 0..self.len {
1496 if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1497 return false;
1498 }
1499 }
1500 true
1501 }
1502}
1503
1504impl<T: Eq> Eq for SegmentedVec<T> {}
1505
1506impl<T> FromIterator<T> for SegmentedVec<T> {
1507 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1508 let iter = iter.into_iter();
1509 let (lower, _) = iter.size_hint();
1510 let mut vec = Self::new();
1511 vec.reserve(lower);
1512 for item in iter {
1513 vec.push(item);
1514 }
1515 vec
1516 }
1517}
1518
1519impl<T> Extend<T> for SegmentedVec<T> {
1520 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1521 let iter = iter.into_iter();
1522 let (lower, _) = iter.size_hint();
1523 self.reserve(lower);
1524 for item in iter {
1525 self.push(item);
1526 }
1527 }
1528}
1529
/// Borrowing iterator over a [`SegmentedVec`], walking one segment at a time.
pub struct Iter<'a, T> {
    vec: &'a SegmentedVec<T>,
    // Cursor within the current segment; null before the first segment loads.
    ptr: *const T,
    // One-past-the-end of the current segment's occupied prefix.
    segment_end: *const T,
    // Logical index of the next element to yield.
    index: usize,
    // Next shelf to load when the current segment is exhausted.
    shelf_index: u32,
}
1544
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr < self.segment_end {
            // Fast path: still inside the current segment.
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1565
impl<'a, T> Iter<'a, T> {
    /// Loads the next shelf and yields its first element, or `None` when the
    /// iterator is exhausted.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        let shelf_idx = self.shelf_index as usize;
        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
        let ptr = self.vec.dynamic_segments[shelf_idx];
        // Clamp to the occupied prefix of the (possibly last) shelf.
        let segment_len = shelf_size.min(self.vec.len - self.index);
        self.ptr = ptr;
        self.segment_end = unsafe { ptr.add(segment_len) };
        self.shelf_index += 1;

        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}

impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
1589
/// Mutably borrowing iterator over a [`SegmentedVec`]; see [`Iter`].
pub struct IterMut<'a, T> {
    vec: &'a mut SegmentedVec<T>,
    // Cursor within the current segment; null before the first segment loads.
    ptr: *mut T,
    // One-past-the-end of the current segment's occupied prefix.
    segment_end: *mut T,
    // Logical index of the next element to yield.
    index: usize,
    // Next shelf to load when the current segment is exhausted.
    shelf_index: u32,
}
1602
impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.ptr < self.segment_end {
            // Fast path: still inside the current segment. The raw pointer
            // detour decouples the returned borrow from `self`.
            let result = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(unsafe { &mut *result });
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1623
impl<'a, T> IterMut<'a, T> {
    /// Loads the next shelf and yields its first element, or `None` when the
    /// iterator is exhausted.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        let shelf_idx = self.shelf_index as usize;
        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
        let ptr = self.vec.dynamic_segments[shelf_idx];
        // Clamp to the occupied prefix of the (possibly last) shelf.
        let segment_len = shelf_size.min(self.vec.len - self.index);
        self.ptr = ptr;
        self.segment_end = unsafe { ptr.add(segment_len) };
        self.shelf_index += 1;

        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}

impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
1647
impl<T> IntoIterator for SegmentedVec<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the vector, yielding elements by value in index order.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: self,
            index: 0,
        }
    }
}

impl<'a, T> IntoIterator for &'a SegmentedVec<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut SegmentedVec<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
1677
/// Owning iterator over a [`SegmentedVec`].
pub struct IntoIter<T> {
    vec: SegmentedVec<T>,
    // Logical index of the next element to move out; elements before it have
    // already been yielded and must not be dropped again.
    index: usize,
}

impl<T> Iterator for IntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.vec.len {
            return None;
        }
        // Move the element out bitwise; `Drop` skips already-yielded slots.
        let value = unsafe { self.vec.unchecked_read(self.index) };
        self.index += 1;
        Some(value)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len - self.index;
        (remaining, Some(remaining))
    }
}

impl<T> ExactSizeIterator for IntoIter<T> {}
1703
impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        // Drop only the elements that were never yielded; setting `len = 0`
        // afterwards makes the inner vec's own `Drop` skip element drops and
        // just free the segments.
        for i in self.index..self.vec.len {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }
        self.vec.len = 0;
    }
}
1716
/// Draining iterator returned by [`SegmentedVec::drain`]. The tail is shifted
/// down over the removed range when the `Drain` is dropped.
pub struct Drain<'a, T> {
    vec: &'a mut SegmentedVec<T>,
    // Start of the drained range (fixed).
    range_start: usize,
    // Back cursor: one past the last element still to be drained.
    range_end: usize,
    // Front cursor: next element to yield from the front.
    index: usize,
}
1726
1727impl<'a, T> Iterator for Drain<'a, T> {
1728 type Item = T;
1729
1730 fn next(&mut self) -> Option<Self::Item> {
1731 if self.index >= self.range_end {
1732 None
1733 } else {
1734 let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1735 self.index += 1;
1736 Some(value)
1737 }
1738 }
1739
1740 fn size_hint(&self) -> (usize, Option<usize>) {
1741 let remaining = self.range_end - self.index;
1742 (remaining, Some(remaining))
1743 }
1744}
1745
1746impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1747 fn next_back(&mut self) -> Option<Self::Item> {
1748 if self.index >= self.range_end {
1749 None
1750 } else {
1751 self.range_end -= 1;
1752 Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1753 }
1754 }
1755}
1756
1757impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
1758
1759impl<'a, T> Drop for Drain<'a, T> {
1760 fn drop(&mut self) {
1761 for i in self.index..self.range_end {
1763 unsafe {
1764 std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1765 }
1766 }
1767
1768 let original_range_end = self.range_end;
1770 let original_len = self.vec.len;
1771 let drain_count = original_range_end - self.range_start;
1772
1773 for i in 0..(original_len - original_range_end) {
1774 unsafe {
1775 let src = self.vec.unchecked_at(original_range_end + i) as *const T;
1776 let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
1777 std::ptr::copy_nonoverlapping(src, dst, 1);
1778 }
1779 }
1780
1781 self.vec.len = original_len - drain_count;
1782 }
1783}
1784
#[cfg(test)]
mod tests {
    //! Unit tests for `SegmentedVec`: construction, element access, iteration,
    //! sorting, slicing, and growth/capacity behavior.
    //
    // Fix applied: several statements had been fused onto single lines
    // (sort_by/sort_unstable_by tests, pointer-stability test, and the
    // extend_from_copy_slice test); they are restored to one statement per
    // line. No test logic changed.

    use super::*;

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    #[test]
    fn test_stable_pointers() {
        // Core guarantee: growing never moves existing elements.
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        for i in 2..1000 {
            vec.push(i);
        }

        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    #[test]
    fn test_sort_stable() {
        // `sort` must preserve the relative order of equal keys.
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        // Unstable sort: only the key order is guaranteed, not tie order.
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        // Sorting moves values between slots, but the slots themselves
        // never move: the pointer to slot 0 stays valid.
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        let ptr = &vec[0] as *const i32;

        vec.sort();

        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_min_non_zero_cap() {
        // 1-byte elements start with 8 slots, <=1024-byte elements with 4.
        let mut vec_u8: SegmentedVec<u8> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        let mut vec_i32: SegmentedVec<i32> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]);
        assert_eq!(vec.len(), 3);
        vec.push(4);
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }
    }
}