1mod slice;
28mod sort;
29
30pub use slice::{
31 Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32 Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::ops::{Index, IndexMut};
39
/// Upper bound on the number of segments. Segment sizes double, so 64 shelves
/// cover more capacity than any realistic `usize` length.
const MAX_SEGMENTS: usize = 64;
43
#[repr(C)]
pub struct SegmentedVec<T> {
    // Next write position inside the active segment; null when the vector has
    // never held elements or was cleared.
    write_ptr: *mut T,
    // One past the last slot of the active segment; `write_ptr == segment_end`
    // means the active segment is full.
    segment_end: *mut T,
    // Number of initialized elements.
    len: usize,
    // Index into `dynamic_segments` of the segment the cursor points into;
    // `usize::MAX` acts as "-1" for an empty vector (see `push_slow`).
    active_segment_index: usize,
    // How many entries of `dynamic_segments` are currently allocated.
    segment_count: usize,
    // Segment base pointers; segment `i` holds `MIN_NON_ZERO_CAP << i` slots,
    // so elements never move once written (pointer stability).
    dynamic_segments: [*mut T; MAX_SEGMENTS],
    // Marks ownership of `T` for drop-check/variance purposes.
    _marker: PhantomData<T>,
}
58
59impl<T> SegmentedVec<T> {
    /// Capacity of the first segment, chosen by element size: 8 for 1-byte
    /// elements, 4 for anything up to 1 KiB, 1 beyond that. Always a power of
    /// two, which the `biased`/MSB indexing scheme below relies on.
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    /// log2 of `MIN_NON_ZERO_CAP`.
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();
77
78 #[inline]
88 pub const fn new() -> Self {
89 Self {
90 dynamic_segments: [std::ptr::null_mut(); MAX_SEGMENTS],
91 segment_count: 0,
92 len: 0,
93 write_ptr: std::ptr::null_mut(),
94 segment_end: std::ptr::null_mut(),
95 active_segment_index: usize::MAX,
97 _marker: PhantomData,
98 }
99 }
100
    /// Returns the number of elements in the vector.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector holds no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Returns how many elements can be stored without allocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.segment_count as u32)
    }
118
    /// Appends `value` to the back of the vector.
    ///
    /// Elements never move once written, so references obtained earlier stay
    /// valid across pushes.
    ///
    /// NOTE(review): for zero-sized `T`, `write_ptr.add(1)` does not advance
    /// the address, so every push takes the slow path and the segment index
    /// climbs once per push — looks like it would hit `MAX_SEGMENTS` quickly;
    /// confirm whether ZSTs are supported.
    #[inline]
    pub fn push(&mut self, value: T) {
        // Fast path: room left in the active segment.
        if self.write_ptr < self.segment_end {
            unsafe {
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.len += 1;
            return;
        }

        self.push_slow(value);
    }

    /// Slow path of `push`: advance to (or allocate) the next segment and
    /// write `value` into its first slot.
    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        // usize::MAX (empty vector) wraps to segment 0 here.
        self.active_segment_index = self.active_segment_index.wrapping_add(1);

        if self.active_segment_index >= self.segment_count {
            self.grow_once();
        }

        let idx = self.active_segment_index;

        let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
        // Segment `idx` holds MIN_NON_ZERO_CAP << idx slots.
        let shelf_size = Self::MIN_NON_ZERO_CAP << idx;

        unsafe {
            std::ptr::write(base, value);
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len += 1;
    }
173
    /// Removes and returns the last element, or `None` if the vector is empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }

        let segment_capacity = Self::MIN_NON_ZERO_CAP << self.active_segment_index;
        // Base of the active segment, derived from its end and capacity.
        let segment_base = unsafe { self.segment_end.sub(segment_capacity) };
        // Fast path: the active segment still contains at least one element.
        // Note: this pop may leave `write_ptr == segment_base` (cursor parked
        // at the segment base) — push and pop both handle that state.
        if self.write_ptr > segment_base {
            self.len -= 1;
            unsafe {
                self.write_ptr = self.write_ptr.sub(1);
                Some(std::ptr::read(self.write_ptr))
            }
        } else {
            self.pop_slow_path()
        }
    }

    /// Slow path of `pop`: the cursor sits at the base of the active segment,
    /// so the element to remove is the last slot of the previous segment.
    /// Only reached with `len > 0`, hence `active_segment_index >= 1` here.
    #[cold]
    #[inline(never)]
    fn pop_slow_path(&mut self) -> Option<T> {
        self.active_segment_index -= 1;
        let idx = self.active_segment_index;

        let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
        let capacity = Self::MIN_NON_ZERO_CAP << idx;

        self.segment_end = unsafe { base.add(capacity) };
        self.write_ptr = unsafe { self.segment_end.sub(1) };
        self.len -= 1;
        Some(unsafe { std::ptr::read(self.write_ptr) })
    }
227
    /// Returns a reference to the element at `index`, or `None` when out of
    /// bounds.
    #[inline]
    pub fn get(&self, index: usize) -> Option<&T> {
        if index < self.len {
            Some(unsafe { self.unchecked_at(index) })
        } else {
            None
        }
    }

    /// Mutable counterpart of `get`.
    #[inline]
    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
        if index < self.len {
            Some(unsafe { self.unchecked_at_mut(index) })
        } else {
            None
        }
    }
251
252 #[inline]
254 pub fn first(&self) -> Option<&T> {
255 if self.len == 0 {
256 None
257 } else {
258 Some(unsafe { &*self.dynamic_segments[0] })
260 }
261 }
262
263 #[inline]
265 pub fn first_mut(&mut self) -> Option<&mut T> {
266 if self.len == 0 {
267 None
268 } else {
269 Some(unsafe { &mut *self.dynamic_segments[0] })
271 }
272 }
273
274 #[inline]
276 pub fn last(&self) -> Option<&T> {
277 if self.len == 0 {
278 None
279 } else {
280 Some(unsafe { &*self.write_ptr.sub(1) })
282 }
283 }
284
285 #[inline]
287 pub fn last_mut(&mut self) -> Option<&mut T> {
288 if self.len == 0 {
289 None
290 } else {
291 Some(unsafe { &mut *self.write_ptr.sub(1) })
293 }
294 }
295
    /// Returns `true` if any element equals `x`.
    #[inline]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(x)
    }

    /// Returns `true` if the vector begins with `needle`.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    /// Returns `true` if the vector ends with `needle`.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }

    /// Binary-searches for `x`; requires the vector to be sorted.
    /// Delegates to the segmented-slice view.
    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
    where
        T: Ord,
    {
        self.as_slice().binary_search(x)
    }

    /// Binary search with a custom comparator.
    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> Ordering,
    {
        self.as_slice().binary_search_by(f)
    }

    /// Binary search by a key extracted with `f`.
    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> B,
        B: Ord,
    {
        self.as_slice().binary_search_by_key(b, f)
    }
348
    /// Swaps the elements at indices `a` and `b` (bounds-checked by the
    /// slice view).
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    /// Reverses element order in place.
    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    /// Overwrites every element with clones of `value`.
    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    /// Overwrites every element with values produced by `f`.
    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }
379
    /// Removes (and drops) all elements but keeps the allocated segments.
    pub fn clear(&mut self) {
        if self.len == 0 {
            return;
        }

        if std::mem::needs_drop::<T>() {
            // Drop each segment's initialized prefix.
            let mut remaining = self.len;
            for shelf in 0..self.segment_count {
                let size = Self::shelf_size(shelf as u32);
                let count = remaining.min(size);
                let ptr = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
                unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        self.len = 0;
        // Null cursor: the next push takes the slow path and reuses segment 0.
        self.write_ptr = std::ptr::null_mut();
        self.segment_end = std::ptr::null_mut();
        self.active_segment_index = usize::MAX;
    }
409
410 pub fn truncate(&mut self, new_len: usize) {
414 if new_len >= self.len {
415 return;
416 }
417
418 if std::mem::needs_drop::<T>() {
419 let dynamic_new_len = new_len;
420 let dynamic_old_len = self.len;
421
422 if dynamic_new_len < dynamic_old_len {
423 let mut pos = 0usize;
424 for shelf in 0..self.segment_count {
425 let size = Self::shelf_size(shelf as u32);
426 let seg_end = pos + size;
427
428 let drop_start = dynamic_new_len.max(pos);
430 let drop_end = dynamic_old_len.min(seg_end);
431
432 if drop_start < drop_end {
433 let offset = drop_start - pos;
434 let count = drop_end - drop_start;
435 let ptr =
436 unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(offset) };
437 unsafe {
438 std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
439 };
440 }
441
442 pos = seg_end;
443 if pos >= dynamic_old_len {
444 break;
445 }
446 }
447 }
448 }
449
450 self.len = new_len;
451 if new_len > 0 {
453 let biased = new_len + Self::MIN_NON_ZERO_CAP;
454 let msb = biased.ilog2();
455 let idx = (msb - Self::MIN_CAP_EXP) as usize;
456 self.active_segment_index = idx;
457
458 let capacity = 1usize << msb;
459 let offset = biased ^ capacity;
460 unsafe {
461 let base = *self.dynamic_segments.get_unchecked(idx);
462 self.write_ptr = base.add(offset);
463 self.segment_end = base.add(capacity);
464 }
465 } else {
466 self.write_ptr = std::ptr::null_mut();
468 self.segment_end = std::ptr::null_mut();
469 self.active_segment_index = usize::MAX;
471 }
472 }
473
474 pub fn reserve(&mut self, additional: usize) {
476 self.grow_capacity(self.len + additional);
477 if self.write_ptr.is_null() {
479 unsafe {
480 let base = *self.dynamic_segments.get_unchecked(0);
482 self.write_ptr = base;
483 self.segment_end = base.add(Self::MIN_NON_ZERO_CAP);
484
485 self.active_segment_index = 0;
490 }
491 }
492 }
493
    /// Frees segments no longer needed for the current length.
    pub fn shrink_to_fit(&mut self) {
        self.shrink_capacity(self.len);
    }
500
    /// Returns a borrowing iterator over the elements, segment by segment.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T> {
        Iter {
            vec: self,
            // Null cursor: the first `next` call loads segment 0 lazily.
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Mutable counterpart of [`iter`](Self::iter).
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }
526
    /// Read-only segmented view over all elements.
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T> {
        SegmentedSlice::new(self)
    }

    /// Mutable segmented view over all elements.
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T> {
        SegmentedSliceMut::new(self)
    }

    /// Read-only view over `range`.
    ///
    /// # Panics
    /// Panics if the range is invalid or out of bounds.
    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    /// Mutable view over `range`.
    ///
    /// # Panics
    /// Panics if the range is invalid or out of bounds.
    #[inline]
    pub fn slice_mut(&mut self, range: std::ops::Range<usize>) -> SegmentedSliceMut<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }
612
613 pub fn extend_from_slice(&mut self, other: &[T])
615 where
616 T: Clone,
617 {
618 if other.is_empty() {
619 return;
620 }
621 self.reserve(other.len());
622
623 let mut src = other;
624
625 while !src.is_empty() {
627 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
628 let to_copy = src.len().min(remaining);
629 let dest = unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) };
630 for (i, item) in src.iter().take(to_copy).enumerate() {
631 unsafe { std::ptr::write(dest.add(i), item.clone()) };
632 }
633 self.len += to_copy;
634 src = &src[to_copy..];
635 }
636
637 if self.len > 0 {
639 let biased = self.len + Self::MIN_NON_ZERO_CAP;
640 let msb = biased.ilog2();
641 let idx = (msb - Self::MIN_CAP_EXP) as usize;
642 self.active_segment_index = idx;
643
644 let capacity = 1usize << msb;
645 let offset = biased ^ capacity;
646 unsafe {
647 let base = *self.dynamic_segments.get_unchecked(idx);
648 self.write_ptr = base.add(offset);
649 self.segment_end = base.add(capacity);
650 }
651 }
652 }
653
654 pub fn extend_from_copy_slice(&mut self, other: &[T])
659 where
660 T: Copy,
661 {
662 if other.is_empty() {
663 return;
664 }
665 self.reserve(other.len());
666
667 let mut src = other;
668
669 while !src.is_empty() {
671 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
672 let to_copy = src.len().min(remaining);
673 unsafe {
674 let dest = (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx);
675 std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
676 };
677 self.len += to_copy;
678 src = &src[to_copy..];
679 }
680
681 if self.len > 0 {
683 let biased = self.len + Self::MIN_NON_ZERO_CAP;
684 let msb = biased.ilog2();
685 let idx = (msb - Self::MIN_CAP_EXP) as usize;
686 self.active_segment_index = idx;
687
688 let capacity = 1usize << msb;
689 let offset = biased ^ capacity;
690 unsafe {
691 let base = *self.dynamic_segments.get_unchecked(idx);
692 self.write_ptr = base.add(offset);
693 self.segment_end = base.add(capacity);
694 }
695 }
696 }
697
    /// Stable sort (delegates to the segmented-slice implementation).
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    /// Stable sort with a comparator.
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    /// Stable sort by a key extracted with `f`.
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }

    /// Unstable in-place sort.
    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    /// Unstable sort with a comparator.
    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    /// Unstable sort by a key extracted with `f`.
    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }

    /// Returns `true` if the elements are sorted.
    pub fn is_sorted(&self) -> bool
    where
        T: PartialOrd,
    {
        self.as_slice().is_sorted()
    }

    /// Like `is_sorted`, with a custom ordering predicate.
    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: FnMut(&T, &T) -> bool,
    {
        self.as_slice().is_sorted_by(compare)
    }

    /// Like `is_sorted`, comparing keys extracted with `f`.
    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        self.as_slice().is_sorted_by_key(f)
    }

    /// First index for which `pred` returns `false`, assuming the vector is
    /// partitioned with respect to `pred`.
    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        self.as_slice().partition_point(pred)
    }

    /// Rotates the elements left by `mid` positions in place.
    pub fn rotate_left(&mut self, mid: usize) {
        self.as_mut_slice().rotate_left(mid);
    }

    /// Rotates the elements right by `k` positions in place.
    pub fn rotate_right(&mut self, k: usize) {
        self.as_mut_slice().rotate_right(k);
    }
886
    /// Creates an empty vector with room for at least `capacity` elements.
    pub fn with_capacity(capacity: usize) -> Self {
        let mut vec = Self::new();
        vec.reserve(capacity);
        vec
    }
893
894 pub fn insert(&mut self, index: usize, element: T) {
900 assert!(index <= self.len);
901 self.push(element);
902 if index < self.len - 1 {
904 for i in (index..self.len - 1).rev() {
905 unsafe {
906 let ptr_a = self.unchecked_at_mut(i) as *mut T;
907 let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
908 std::ptr::swap(ptr_a, ptr_b);
909 }
910 }
911 }
912 }
913
914 pub fn remove(&mut self, index: usize) -> T {
920 assert!(index < self.len);
921 for i in index..self.len - 1 {
923 unsafe {
924 let ptr_a = self.unchecked_at_mut(i) as *mut T;
925 let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
926 std::ptr::swap(ptr_a, ptr_b);
927 }
928 }
929 self.pop().unwrap()
930 }
931
932 pub fn swap_remove(&mut self, index: usize) -> T {
941 assert!(index < self.len);
942 let last_index = self.len - 1;
943 if index != last_index {
944 unsafe {
945 let ptr_a = self.unchecked_at_mut(index) as *mut T;
946 let ptr_b = self.write_ptr.sub(1);
948 std::ptr::swap(ptr_a, ptr_b);
949 }
950 }
951 self.pop().unwrap()
952 }
953
    /// Keeps only the elements for which `f` returns `true`, preserving order.
    ///
    /// NOTE(review): built on repeated `remove`, so the worst case is O(n^2);
    /// a single compacting pass would be O(n).
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Like `retain`, but the predicate receives mutable access.
    /// Same O(n^2) caveat as `retain`.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at_mut(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Removes consecutive equal elements, keeping the first of each run.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }
995
996 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
998 where
999 F: FnMut(&mut T, &mut T) -> bool,
1000 {
1001 if self.len <= 1 {
1002 return;
1003 }
1004 let mut write = 1;
1005 for read in 1..self.len {
1006 let should_keep = unsafe {
1007 let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1008 let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1009 !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1010 };
1011 if should_keep {
1012 if read != write {
1013 unsafe {
1014 let ptr_src = self.unchecked_at_mut(read) as *mut T;
1015 let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1016 std::ptr::swap(ptr_dst, ptr_src);
1017 }
1018 }
1019 write += 1;
1020 } else {
1021 unsafe {
1023 std::ptr::drop_in_place(self.unchecked_at_mut(read));
1024 }
1025 }
1026 }
1027 self.len = write;
1028 }
1029
    /// Removes consecutive elements that map to equal keys, keeping the first
    /// element of each run.
    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }
1038
    /// Resizes to `new_len`, filling new slots with clones of `value` or
    /// truncating.
    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            // NOTE(review): clones `value` even for the final slot (n+1 clones
            // where n would do); harmless but differs from `Vec::resize`.
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Resizes to `new_len`, filling new slots with values produced by `f`.
    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Moves all elements of `other` onto the back of `self`, preserving
    /// order and leaving `other` empty.
    pub fn append(&mut self, other: &mut Self) {
        let other_len = other.len;
        self.reserve(other_len);
        let start = self.len;
        // Popping yields other's elements back-to-front...
        while let Some(item) = other.pop() {
            self.push(item);
        }
        // ...so reverse the appended region to restore their order.
        let mut left = start;
        let mut right = self.len;
        while left < right {
            right -= 1;
            if left < right {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
                left += 1;
            }
        }
    }
1099
1100 pub fn split_off(&mut self, at: usize) -> Self {
1109 assert!(at <= self.len);
1110 let mut other = Self::new();
1111 other.reserve(self.len - at);
1112 for i in at..self.len {
1113 other.push(unsafe { self.unchecked_read(i) });
1114 }
1115 self.len = at;
1116 other
1117 }
1118
    /// Iterator over non-overlapping chunks of `chunk_size` elements.
    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
        self.as_slice().chunks(chunk_size)
    }

    /// Iterator over overlapping windows of `size` elements.
    pub fn windows(&self, size: usize) -> Windows<'_, T> {
        self.as_slice().windows(size)
    }

    /// Like `chunks`, starting from the back of the vector.
    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
        self.as_slice().rchunks(chunk_size)
    }

    /// Like `chunks`, but yields only full-sized chunks.
    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
        self.as_slice().chunks_exact(chunk_size)
    }

    /// Removes `range` and returns an iterator over the removed elements;
    /// the vector is compacted when the iterator is dropped.
    ///
    /// # Panics
    /// Panics if the range is invalid or out of bounds.
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            index: range.start,
        }
    }

    /// Clones all elements into a contiguous `Vec`.
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.as_slice().to_vec()
    }
1177
    /// Number of segments needed to hold `box_count` elements.
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        let val = box_count + Self::MIN_NON_ZERO_CAP;
        val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
    }

    /// Capacity of segment `shelf_index` (doubles with each segment).
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        Self::MIN_NON_ZERO_CAP << shelf_index
    }

    /// Maps a flat element index to `(segment, offset)`.
    /// Biasing by `MIN_NON_ZERO_CAP` makes the MSB of `biased` identify the
    /// segment; clearing that bit leaves the offset inside the segment.
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        let biased = list_index + Self::MIN_NON_ZERO_CAP;
        let msb = biased.ilog2();
        let shelf = msb - Self::MIN_CAP_EXP;
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }

    /// Like `location`, but also returns how many slots remain in the segment
    /// starting at `list_index` (used for bulk copies).
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::MIN_NON_ZERO_CAP;
        let msb = biased.ilog2();
        let shelf = msb - Self::MIN_CAP_EXP;
        let box_idx = biased ^ (1usize << msb);
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }

    /// Returns a reference to the element at `index` without bounds checking.
    ///
    /// # Safety
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            &*(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
        }
    }

    /// Mutable counterpart of `unchecked_at`.
    ///
    /// # Safety
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            &mut *(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
        }
    }

    /// Moves the element at `index` out by value (bitwise read).
    ///
    /// # Safety
    /// `index` must be less than `self.len`, and the caller must ensure the
    /// slot is never read or dropped again afterwards.
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            std::ptr::read((*self.dynamic_segments.get_unchecked(shelf)).add(box_idx))
        }
    }
1261
    /// Allocates exactly one additional segment (the next power-of-two size).
    ///
    /// # Panics
    /// Panics when the segment limit is reached, the layout overflows, or
    /// allocation fails.
    fn grow_once(&mut self) {
        assert!(
            self.segment_count < MAX_SEGMENTS,
            "Maximum segment count exceeded"
        );

        let size = Self::shelf_size(self.segment_count as u32);
        let layout = Layout::array::<T>(size).expect("Layout overflow");
        let ptr = if layout.size() == 0 {
            // Zero-sized layout (ZST): use a dangling, never-dereferenced base.
            std::ptr::dangling_mut::<T>()
        } else {
            let ptr = unsafe { alloc::alloc(layout) };
            if ptr.is_null() {
                panic!("Allocation failed");
            }
            ptr as *mut T
        };
        self.dynamic_segments[self.segment_count] = ptr;
        self.segment_count += 1;
    }

    /// Allocates segments until total capacity reaches `new_capacity`.
    /// Never shrinks.
    fn grow_capacity(&mut self, new_capacity: usize) {
        let new_shelf_count = Self::shelf_count(new_capacity) as usize;
        let old_shelf_count = self.segment_count;

        if new_shelf_count > old_shelf_count {
            assert!(
                new_shelf_count <= MAX_SEGMENTS,
                "Maximum segment count exceeded"
            );

            for i in old_shelf_count..new_shelf_count {
                let size = Self::shelf_size(i as u32);
                let layout = Layout::array::<T>(size).expect("Layout overflow");
                let ptr = if layout.size() == 0 {
                    std::ptr::dangling_mut::<T>()
                } else {
                    let ptr = unsafe { alloc::alloc(layout) };
                    if ptr.is_null() {
                        panic!("Allocation failed");
                    }
                    ptr as *mut T
                };
                self.dynamic_segments[i] = ptr;
            }
            self.segment_count = new_shelf_count;
        }
    }

    /// Total capacity of the first `shelf_count` segments:
    /// `MIN_NON_ZERO_CAP * (2^shelf_count - 1)`.
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
    }
1317
1318 fn shrink_capacity(&mut self, new_capacity: usize) {
1320 let new_shelf_count = Self::shelf_count(new_capacity);
1321 let old_shelf_count = self.segment_count as u32;
1322
1323 if new_shelf_count < old_shelf_count {
1324 self.free_shelves(old_shelf_count, new_shelf_count);
1325 self.segment_count = new_shelf_count as usize;
1326 }
1327 }
1328
    /// Deallocates segments `to_count..from_count` (highest first) and nulls
    /// their slots. Zero-sized layouts (ZST segments) were never actually
    /// allocated, so they are skipped.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            if layout.size() > 0 {
                unsafe {
                    alloc::dealloc(self.dynamic_segments[i as usize] as *mut u8, layout);
                }
            }
            self.dynamic_segments[i as usize] = std::ptr::null_mut();
        }
    }
1342}
1343
impl<T> Drop for SegmentedVec<T> {
    fn drop(&mut self) {
        // Drop all elements first, then release every segment allocation.
        self.clear();
        self.free_shelves(self.segment_count as u32, 0);
    }
}
1352
impl<T> sort::IndexedAccess<T> for SegmentedVec<T> {
    /// Borrows the element at `index` (bounds guaranteed by the caller).
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        unsafe { self.unchecked_at(index) }
    }

    /// Raw const pointer to the element at `index`.
    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        let (shelf, box_idx) = Self::location(index);
        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
    }

    /// Raw mutable pointer to the element at `index`.
    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        let (shelf, box_idx) = Self::location(index);
        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
    }

    /// Swaps two elements by index (no-op when `a == b`).
    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1387
1388impl<T> Default for SegmentedVec<T> {
1389 fn default() -> Self {
1390 Self::new()
1391 }
1392}
1393
impl<T> Index<usize> for SegmentedVec<T> {
    type Output = T;

    /// # Panics
    /// Panics if `index >= len`.
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}

impl<T> IndexMut<usize> for SegmentedVec<T> {
    /// # Panics
    /// Panics if `index >= len`.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}
1407
1408impl<T: Clone> Clone for SegmentedVec<T> {
1409 fn clone(&self) -> Self {
1410 if self.len == 0 {
1411 return Self::new();
1412 }
1413
1414 let mut new_vec = Self::new();
1415 new_vec.reserve(self.len);
1416
1417 let mut remaining = self.len;
1418 for shelf in 0..self.segment_count {
1419 let size = Self::shelf_size(shelf as u32);
1420 let count = remaining.min(size);
1421 let src = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
1422 let dst = unsafe { *new_vec.dynamic_segments.get_unchecked(shelf) };
1423 for i in 0..count {
1424 unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1425 }
1426 new_vec.len += count;
1427 remaining -= count;
1428 if remaining == 0 {
1429 break;
1430 }
1431 }
1432
1433 if new_vec.len > 0 {
1435 let biased = new_vec.len + Self::MIN_NON_ZERO_CAP;
1436 let msb = biased.ilog2();
1437 let idx = (msb - Self::MIN_CAP_EXP) as usize;
1438 new_vec.active_segment_index = idx;
1439
1440 let capacity = 1usize << msb;
1441 let offset = biased ^ capacity;
1442 unsafe {
1443 let base = *new_vec.dynamic_segments.get_unchecked(idx);
1444 new_vec.write_ptr = base.add(offset);
1445 new_vec.segment_end = base.add(capacity);
1446 }
1447 }
1448
1449 new_vec
1450 }
1451}
1452
impl<T: std::fmt::Debug> std::fmt::Debug for SegmentedVec<T> {
    /// Formats like a slice: `[a, b, c]`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
1458
1459impl<T: PartialEq> PartialEq for SegmentedVec<T> {
1460 fn eq(&self, other: &Self) -> bool {
1461 if self.len != other.len {
1462 return false;
1463 }
1464 for i in 0..self.len {
1465 if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1466 return false;
1467 }
1468 }
1469 true
1470 }
1471}
1472
1473impl<T: Eq> Eq for SegmentedVec<T> {}
1474
1475impl<T> FromIterator<T> for SegmentedVec<T> {
1476 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1477 let iter = iter.into_iter();
1478 let (lower, _) = iter.size_hint();
1479 let mut vec = Self::new();
1480 vec.reserve(lower);
1481 for item in iter {
1482 vec.push(item);
1483 }
1484 vec
1485 }
1486}
1487
1488impl<T> Extend<T> for SegmentedVec<T> {
1489 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1490 let iter = iter.into_iter();
1491 let (lower, _) = iter.size_hint();
1492 self.reserve(lower);
1493 for item in iter {
1494 self.push(item);
1495 }
1496 }
1497}
1498
/// Borrowing iterator over a `SegmentedVec`, advancing segment by segment.
pub struct Iter<'a, T> {
    vec: &'a SegmentedVec<T>,
    // Next element within the current segment (null before the first call).
    ptr: *const T,
    // One past the last initialized element of the current segment.
    segment_end: *const T,
    // Flat index of the next element to yield.
    index: usize,
    // Next segment to load once the current one is exhausted.
    shelf_index: u32,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: stay within the current segment.
        if self.ptr < self.segment_end {
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1534
impl<'a, T> Iter<'a, T> {
    /// Loads the next segment and yields its first element, or `None` once
    /// every element has been visited.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        let shelf_idx = self.shelf_index as usize;
        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
        let ptr = self.vec.dynamic_segments[shelf_idx];
        // Only the initialized prefix of the final segment is iterated.
        let segment_len = shelf_size.min(self.vec.len - self.index);
        self.ptr = ptr;
        self.segment_end = unsafe { ptr.add(segment_len) };
        self.shelf_index += 1;

        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}

impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
1558
/// Mutably borrowing iterator over a `SegmentedVec` (see [`Iter`]).
pub struct IterMut<'a, T> {
    vec: &'a mut SegmentedVec<T>,
    // Next element within the current segment (null before the first call).
    ptr: *mut T,
    // One past the last initialized element of the current segment.
    segment_end: *mut T,
    // Flat index of the next element to yield.
    index: usize,
    // Next segment to load once the current one is exhausted.
    shelf_index: u32,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: stay within the current segment.
        if self.ptr < self.segment_end {
            let result = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(unsafe { &mut *result });
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1592
impl<'a, T> IterMut<'a, T> {
    /// Loads the next segment and yields its first element, or `None` once
    /// every element has been visited.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        let shelf_idx = self.shelf_index as usize;
        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
        let ptr = self.vec.dynamic_segments[shelf_idx];
        // Only the initialized prefix of the final segment is iterated.
        let segment_len = shelf_size.min(self.vec.len - self.index);
        self.ptr = ptr;
        self.segment_end = unsafe { ptr.add(segment_len) };
        self.shelf_index += 1;

        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}

impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
1616
impl<T> IntoIterator for SegmentedVec<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the vector into an owning iterator.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: self,
            index: 0,
        }
    }
}

impl<'a, T> IntoIterator for &'a SegmentedVec<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut SegmentedVec<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

/// Owning iterator; elements not yet yielded are dropped with it.
pub struct IntoIter<T> {
    vec: SegmentedVec<T>,
    // Flat index of the next element to move out.
    index: usize,
}
1652
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.vec.len {
            return None;
        }
        // Move the element out bitwise; `Drop` below skips yielded slots.
        let value = unsafe { self.vec.unchecked_read(self.index) };
        self.index += 1;
        Some(value)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len - self.index;
        (remaining, Some(remaining))
    }
}

impl<T> ExactSizeIterator for IntoIter<T> {}

impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        // Drop only the elements that were never yielded, then zero the
        // length so the inner vector's own Drop frees just the segments.
        for i in self.index..self.vec.len {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }
        self.vec.len = 0;
    }
}
1685
/// Draining iterator returned by `SegmentedVec::drain`; removes
/// `[range_start, range_end)` and compacts the vector when dropped.
///
/// NOTE(review): if a `Drain` is leaked (`mem::forget`), already-yielded
/// elements remain logically inside the vector — confirm whether leak safety
/// matters here (std's `Vec::drain` guards against this by pre-truncating).
pub struct Drain<'a, T> {
    vec: &'a mut SegmentedVec<T>,
    // First index of the drained range (fixed).
    range_start: usize,
    // One past the last undrained element.
    range_end: usize,
    // Front cursor: next element to yield.
    index: usize,
}

impl<'a, T> Iterator for Drain<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.range_end {
            None
        } else {
            // Move the element out; the slot stays logically dead until
            // `Drop` compacts the vector.
            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
            self.index += 1;
            Some(value)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.range_end - self.index;
        (remaining, Some(remaining))
    }
}
1714
1715impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1716 fn next_back(&mut self) -> Option<Self::Item> {
1717 if self.index >= self.range_end {
1718 None
1719 } else {
1720 self.range_end -= 1;
1721 Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1722 }
1723 }
1724}
1725
1726impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
1727
1728impl<'a, T> Drop for Drain<'a, T> {
1729 fn drop(&mut self) {
1730 for i in self.index..self.range_end {
1732 unsafe {
1733 std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1734 }
1735 }
1736
1737 let original_range_end = self.range_end;
1739 let original_len = self.vec.len;
1740 let drain_count = original_range_end - self.range_start;
1741
1742 for i in 0..(original_len - original_range_end) {
1743 unsafe {
1744 let src = self.vec.unchecked_at(original_range_end + i) as *const T;
1745 let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
1746 std::ptr::copy_nonoverlapping(src, dst, 1);
1747 }
1748 }
1749
1750 self.vec.len = original_len - drain_count;
1751 }
1752}
1753
#[cfg(test)]
mod tests {
    use super::*;

    // --- construction, push/pop, element access ---

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    // Elements must never move when the vector grows: segments are added,
    // existing ones stay in place.
    #[test]
    fn test_stable_pointers() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        // Force many segment allocations.
        for i in 2..1000 {
            vec.push(i);
        }

        assert_eq!(unsafe { *ptr }, 1);
    }

    // --- iteration ---

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    // --- length management ---

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    // Dropping the vector must run each element's destructor exactly once.
    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    // --- sorting ---

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        // Descending comparator.
        vec.sort_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    // Stable sort must preserve the relative order of equal keys.
    #[test]
    fn test_sort_stable() {
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        // Descending comparator.
        vec.sort_unstable_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        // Unstable sort: only check the keys are non-decreasing, not the
        // order of equal keys.
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    // Sorting happens in place: slot 0 keeps its address and ends up holding
    // the minimum element.
    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        let ptr = &vec[0] as *const i32;

        vec.sort();

        assert_eq!(unsafe { *ptr }, 1);
    }

    // --- slices ---

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    // Sorting a sub-slice must only reorder elements inside its range.
    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    // --- capacity policy & bulk copies ---

    // First segment capacity follows MIN_NON_ZERO_CAP: 8 for 1-byte
    // elements, 4 for elements up to 1024 bytes.
    #[test]
    fn test_min_non_zero_cap() {
        let mut vec_u8: SegmentedVec<u8> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        let mut vec_i32: SegmentedVec<i32> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        // Push across several segment growths, then verify all 101 elements.
        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Appending to a non-empty vec preserves the existing prefix.
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        // An empty slice is a no-op.
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        // Pushing after a bulk copy must continue from the right slot.
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]);
        assert_eq!(vec.len(), 3);
        vec.push(4);
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }
    }
}