1mod slice;
28mod sort;
29
30pub use slice::{
31 Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32 Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::ops::{Index, IndexMut};
39
/// Maximum number of geometrically growing segments; with doubling shelf
/// sizes, 64 shelves are enough to exceed any 64-bit address space.
const MAX_SEGMENTS: usize = 64;
43
/// A vector that stores its elements in up to `MAX_SEGMENTS` separately
/// allocated, geometrically growing segments. Elements never move once
/// pushed, so references/pointers to them stay valid across growth.
pub struct SegmentedVec<T> {
    // Base pointer of each allocated segment; null when not yet allocated.
    dynamic_segments: [*mut T; MAX_SEGMENTS],
    // Number of segments currently allocated.
    segment_count: usize,
    // Number of initialized elements.
    len: usize,
    // Cached cursor: slot where the next push writes (null when unusable).
    write_ptr: *mut T,
    // One past the end of the segment `write_ptr` points into.
    segment_end: *mut T,
    // Start of the segment `write_ptr` points into.
    segment_base: *mut T,
    // Marks logical ownership of `T` (drop check / variance).
    _marker: PhantomData<T>,
}
57
58impl<T> SegmentedVec<T> {
    /// Capacity of the first segment, scaled down for large element sizes
    /// (same spirit as std's `RawVec` minimum-capacity heuristic).
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    /// log2 of `MIN_NON_ZERO_CAP`; used to bias the index→shelf arithmetic.
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();

    /// Creates an empty vector. Does not allocate.
    #[inline]
    pub const fn new() -> Self {
        Self {
            dynamic_segments: [std::ptr::null_mut(); MAX_SEGMENTS],
            segment_count: 0,
            len: 0,
            write_ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            segment_base: std::ptr::null_mut(),
            _marker: PhantomData,
        }
    }

    /// Recomputes the cached write cursor (`segment_base`, `write_ptr`,
    /// `segment_end`) from `self.len`.
    ///
    /// NOTE(review): when `segment_count > 0`, `location(self.len)` must map
    /// to an allocated shelf, i.e. `len < capacity()` is required — callers
    /// appear to guarantee this; confirm at each call site.
    #[inline]
    fn init_write_ptr(&mut self) {
        if self.segment_count > 0 {
            let (shelf, box_idx) = Self::location(self.len);
            let shelf_size = Self::shelf_size(shelf as u32);
            // SAFETY: caller guarantees `shelf` indexes an allocated segment.
            let base = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
            self.segment_base = base;
            self.write_ptr = unsafe { base.add(box_idx) };
            self.segment_end = unsafe { base.add(shelf_size) };
        } else {
            self.segment_base = std::ptr::null_mut();
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
115
    /// Number of elements currently stored.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` when the vector holds no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Total number of elements the allocated segments can hold.
    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.segment_count as u32)
    }

    /// Appends `value` at the end. Existing elements never move.
    #[inline]
    pub fn push(&mut self, value: T) {
        // Fast path: the cached cursor still points inside its segment.
        // (A null cursor fails this comparison and falls through.)
        if self.write_ptr < self.segment_end {
            // SAFETY: `write_ptr` addresses an allocated, unused slot.
            unsafe {
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.len += 1;
            return;
        }

        self.push_slow(value);
    }
164
    /// Slow push path: the cursor is exhausted or null, which (given the
    /// call-site invariants) means `len` sits at a segment boundary. Finds
    /// the segment for the new element, allocating one if needed, and
    /// re-seats the cursor.
    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        let new_len = self.len + 1;

        // At a segment boundary `len + MIN_NON_ZERO_CAP` is a power of two,
        // so its trailing zeros yield the shelf index directly.
        let biased = self.len + Self::MIN_NON_ZERO_CAP;
        let shelf = (biased.trailing_zeros() - Self::MIN_CAP_EXP) as usize;
        let shelf_size = Self::shelf_size(shelf as u32);

        let base = if shelf >= self.segment_count {
            self.grow_once();
            // SAFETY: `grow_once` just allocated segment `segment_count - 1`.
            unsafe { *self.dynamic_segments.get_unchecked(self.segment_count - 1) }
        } else {
            // SAFETY: `shelf < segment_count`, so this segment is allocated.
            unsafe { *self.dynamic_segments.get_unchecked(shelf) }
        };

        // SAFETY: `base` is the start of an allocated segment with room for
        // `shelf_size` elements; slot 0 is unused at a boundary.
        unsafe {
            std::ptr::write(base, value);
            self.segment_base = base;
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len = new_len;
    }
191
    /// Removes and returns the last element, or `None` when empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        // Fast path: the last element lives inside the cursor's segment.
        if self.write_ptr > self.segment_base {
            // SAFETY: `write_ptr - 1` addresses the live last element;
            // `len` is decremented first so it is not dropped again.
            unsafe {
                let new_len = self.len - 1;
                self.len = new_len;
                self.write_ptr = self.write_ptr.sub(1);
                Some(std::ptr::read(self.write_ptr))
            }
        } else {
            // Cursor at a segment start (or null): cross-segment pop.
            self.pop_slow_path()
        }
    }
222
223 #[cold]
224 #[inline(never)]
225 fn pop_slow_path(&mut self) -> Option<T> {
226 if self.len == 0 {
227 return None;
228 }
229
230 unsafe {
231 let val = std::ptr::read(self.write_ptr);
234
235 let new_len = self.len - 1;
236 self.len = new_len;
237 self.init_write_ptr();
238
239 Some(val)
240 }
241 }
242
243 #[inline]
247 pub fn get(&self, index: usize) -> Option<&T> {
248 if index < self.len {
249 Some(unsafe { self.unchecked_at(index) })
250 } else {
251 None
252 }
253 }
254
255 #[inline]
259 pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
260 if index < self.len {
261 Some(unsafe { self.unchecked_at_mut(index) })
262 } else {
263 None
264 }
265 }
266
267 #[inline]
269 pub fn first(&self) -> Option<&T> {
270 self.get(0)
271 }
272
273 #[inline]
275 pub fn first_mut(&mut self) -> Option<&mut T> {
276 self.get_mut(0)
277 }
278
279 #[inline]
281 pub fn last(&self) -> Option<&T> {
282 if self.len == 0 {
283 None
284 } else {
285 self.get(self.len - 1)
286 }
287 }
288
289 #[inline]
291 pub fn last_mut(&mut self) -> Option<&mut T> {
292 if self.len == 0 {
293 None
294 } else {
295 self.get_mut(self.len - 1)
296 }
297 }
298
    /// Returns `true` if the vector contains an element equal to `x`.
    #[inline]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(x)
    }

    /// Returns `true` if `needle` is a prefix of the vector.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    /// Returns `true` if `needle` is a suffix of the vector.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }

    /// Binary-searches a sorted vector for `x`; same contract as
    /// `slice::binary_search`.
    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
    where
        T: Ord,
    {
        self.as_slice().binary_search(x)
    }

    /// Binary search with a caller-supplied comparator.
    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> Ordering,
    {
        self.as_slice().binary_search_by(f)
    }

    /// Binary search by a key extraction function.
    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> B,
        B: Ord,
    {
        self.as_slice().binary_search_by_key(b, f)
    }

    /// Swaps the elements at indices `a` and `b`.
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    /// Reverses the order of all elements in place.
    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    /// Overwrites every element with clones of `value`.
    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    /// Overwrites every element with values produced by `f`.
    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }
382
    /// Drops all elements. Keeps the allocated segments for reuse.
    pub fn clear(&mut self) {
        if self.len == 0 {
            return;
        }

        if std::mem::needs_drop::<T>() {
            let mut remaining = self.len;
            // Walk the segments in order, dropping the initialized prefix
            // of each until `len` elements have been dropped.
            for shelf in 0..self.segment_count {
                let size = Self::shelf_size(shelf as u32);
                let count = remaining.min(size);
                let ptr = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
                unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        self.len = 0;
        // Null the cursor so push/pop take their slow paths; `segment_base`
        // is left stale but unreachable (null fails both cursor comparisons).
        self.write_ptr = std::ptr::null_mut();
        self.segment_end = std::ptr::null_mut();
    }

    /// Shortens the vector to `new_len` elements, dropping the rest.
    /// No-op when `new_len >= len`.
    pub fn truncate(&mut self, new_len: usize) {
        if new_len >= self.len {
            return;
        }

        if std::mem::needs_drop::<T>() {
            let dynamic_new_len = new_len;
            let dynamic_old_len = self.len;

            if dynamic_new_len < dynamic_old_len {
                // For each segment, drop the intersection of its slot range
                // [pos, seg_end) with the doomed range [new_len, old_len).
                let mut pos = 0usize;
                for shelf in 0..self.segment_count {
                    let size = Self::shelf_size(shelf as u32);
                    let seg_end = pos + size;

                    let drop_start = dynamic_new_len.max(pos);
                    let drop_end = dynamic_old_len.min(seg_end);

                    if drop_start < drop_end {
                        let offset = drop_start - pos;
                        let count = drop_end - drop_start;
                        let ptr =
                            unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(offset) };
                        unsafe {
                            std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
                        };
                    }

                    pos = seg_end;
                    if pos >= dynamic_old_len {
                        break;
                    }
                }
            }
        }

        self.len = new_len;
        // Re-seat the cursor for the shorter length (new_len < old len
        // <= capacity, so `init_write_ptr`'s precondition holds).
        if new_len > 0 {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
461
462 pub fn reserve(&mut self, additional: usize) {
464 let old_capacity = self.capacity();
465 self.grow_capacity(self.len + additional);
466 if old_capacity == 0 && self.capacity() > 0 && self.segment_end.is_null() {
468 self.init_write_ptr();
469 }
470 }
471
472 pub fn shrink_to_fit(&mut self) {
476 self.shrink_capacity(self.len);
477 }
478
    /// Returns an iterator over references to all elements, in order.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T> {
        // Null start pointers make the first `next()` fetch segment 0.
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Returns an iterator over mutable references to all elements, in order.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
        // Null start pointers make the first `next()` fetch segment 0.
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Borrows the whole vector as a segmented slice.
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T> {
        SegmentedSlice::new(self)
    }

    /// Mutably borrows the whole vector as a segmented slice.
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T> {
        SegmentedSliceMut::new(self)
    }

    /// Borrows the sub-range `range` as a segmented slice.
    ///
    /// # Panics
    /// Panics when the range is invalid or out of bounds.
    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    /// Mutably borrows the sub-range `range` as a segmented slice.
    ///
    /// # Panics
    /// Panics when the range is invalid or out of bounds.
    #[inline]
    pub fn slice_mut(&mut self, range: std::ops::Range<usize>) -> SegmentedSliceMut<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }
590
    /// Appends clones of all elements of `other`, segment by segment.
    pub fn extend_from_slice(&mut self, other: &[T])
    where
        T: Clone,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        // Copy into each segment up to its remaining room, then advance.
        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            let dest = unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) };
            for (i, item) in src.iter().take(to_copy).enumerate() {
                unsafe { std::ptr::write(dest.add(i), item.clone()) };
            }
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Re-seat the write cursor; null it out when completely full so the
        // next push grows via the slow path.
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }

    /// Appends all elements of `other` with `memcpy`-style copies
    /// (requires `T: Copy`).
    pub fn extend_from_copy_slice(&mut self, other: &[T])
    where
        T: Copy,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        // Bulk-copy into each segment up to its remaining room.
        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            unsafe {
                let dest = (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Re-seat the write cursor; null it out when completely full.
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
659
    /// Sorts the vector (stable). Delegates to the segmented-slice sort.
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    /// Stable sort with a caller-supplied comparator.
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    /// Stable sort by a key extraction function.
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }

    /// Unstable sort (typically faster, no allocation guarantee claims).
    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    /// Unstable sort with a caller-supplied comparator.
    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    /// Unstable sort by a key extraction function.
    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }

    /// Returns `true` if the elements are in non-decreasing order.
    pub fn is_sorted(&self) -> bool
    where
        T: PartialOrd,
    {
        self.as_slice().is_sorted()
    }

    /// Returns `true` if the elements are sorted according to `compare`.
    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: FnMut(&T, &T) -> bool,
    {
        self.as_slice().is_sorted_by(compare)
    }

    /// Returns `true` if the extracted keys are in non-decreasing order.
    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        self.as_slice().is_sorted_by_key(f)
    }

    /// Returns the index of the partition point according to `pred`
    /// (same contract as `slice::partition_point`).
    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        self.as_slice().partition_point(pred)
    }

    /// Rotates the vector left by `mid` positions.
    pub fn rotate_left(&mut self, mid: usize) {
        self.as_mut_slice().rotate_left(mid);
    }

    /// Rotates the vector right by `k` positions.
    pub fn rotate_right(&mut self, k: usize) {
        self.as_mut_slice().rotate_right(k);
    }
848
    /// Creates an empty vector with room for at least `capacity` elements.
    pub fn with_capacity(capacity: usize) -> Self {
        let mut vec = Self::new();
        vec.reserve(capacity);
        vec
    }

    /// Inserts `element` at `index`, shifting later elements right.
    /// O(len - index) swaps, since segments are not contiguous.
    ///
    /// # Panics
    /// Panics when `index > len`.
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len);
        self.push(element);
        // Bubble the new element backwards into position.
        if index < self.len - 1 {
            for i in (index..self.len - 1).rev() {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
            }
        }
    }

    /// Removes and returns the element at `index`, shifting later
    /// elements left. O(len - index) swaps.
    ///
    /// # Panics
    /// Panics when `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        // Bubble the doomed element to the back, then pop it.
        for i in index..self.len - 1 {
            unsafe {
                let ptr_a = self.unchecked_at_mut(i) as *mut T;
                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }

    /// Removes and returns the element at `index` in O(1) by swapping the
    /// last element into its place (does not preserve order).
    ///
    /// # Panics
    /// Panics when `index >= len`.
    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        let last_index = self.len - 1;
        if index != last_index {
            unsafe {
                let ptr_a = self.unchecked_at_mut(index) as *mut T;
                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }
914
    /// Keeps only the elements for which `f` returns `true`.
    ///
    /// NOTE(review): each removal calls `remove(i)`, which shifts the whole
    /// tail — worst case O(n²). Consider a single-pass compaction if this
    /// shows up in profiles.
    pub fn retain<F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Like [`retain`](Self::retain), but passes mutable references.
    ///
    /// NOTE(review): same O(n²) worst case as `retain`.
    pub fn retain_mut<F>(&mut self, mut f: F)
    where
        F: FnMut(&mut T) -> bool,
    {
        let mut i = 0;
        while i < self.len {
            if f(unsafe { self.unchecked_at_mut(i) }) {
                i += 1;
            } else {
                self.remove(i);
            }
        }
    }

    /// Removes consecutive duplicate elements.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }
956
957 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
959 where
960 F: FnMut(&mut T, &mut T) -> bool,
961 {
962 if self.len <= 1 {
963 return;
964 }
965 let mut write = 1;
966 for read in 1..self.len {
967 let should_keep = unsafe {
968 let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
969 let curr_ptr = self.unchecked_at_mut(read) as *mut T;
970 !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
971 };
972 if should_keep {
973 if read != write {
974 unsafe {
975 let ptr_src = self.unchecked_at_mut(read) as *mut T;
976 let ptr_dst = self.unchecked_at_mut(write) as *mut T;
977 std::ptr::swap(ptr_dst, ptr_src);
978 }
979 }
980 write += 1;
981 } else {
982 unsafe {
984 std::ptr::drop_in_place(self.unchecked_at_mut(read));
985 }
986 }
987 }
988 self.len = write;
989 }
990
    /// Removes consecutive elements that map to equal keys.
    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }

    /// Resizes to `new_len`, filling new slots with clones of `value`
    /// or truncating as needed.
    pub fn resize(&mut self, new_len: usize, value: T)
    where
        T: Clone,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(value.clone());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Resizes to `new_len`, filling new slots with values from `f`
    /// or truncating as needed.
    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
    where
        F: FnMut() -> T,
    {
        if new_len > self.len {
            self.reserve(new_len - self.len);
            while self.len < new_len {
                self.push(f());
            }
        } else {
            self.truncate(new_len);
        }
    }

    /// Moves all elements of `other` to the end of `self`, preserving
    /// order and leaving `other` empty.
    pub fn append(&mut self, other: &mut Self) {
        let other_len = other.len;
        self.reserve(other_len);
        let start = self.len;
        // Popping yields `other` back-to-front...
        while let Some(item) = other.pop() {
            self.push(item);
        }
        // ...so reverse the appended region to restore the original order.
        let mut left = start;
        let mut right = self.len;
        while left < right {
            right -= 1;
            if left < right {
                unsafe {
                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
                left += 1;
            }
        }
    }
1060
1061 pub fn split_off(&mut self, at: usize) -> Self {
1070 assert!(at <= self.len);
1071 let mut other = Self::new();
1072 other.reserve(self.len - at);
1073 for i in at..self.len {
1074 other.push(unsafe { self.unchecked_read(i) });
1075 }
1076 self.len = at;
1077 other
1078 }
1079
    /// Returns an iterator over `chunk_size`-sized chunks (last may be shorter).
    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
        self.as_slice().chunks(chunk_size)
    }

    /// Returns an iterator over overlapping windows of length `size`.
    pub fn windows(&self, size: usize) -> Windows<'_, T> {
        self.as_slice().windows(size)
    }

    /// Returns an iterator over chunks starting from the end.
    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
        self.as_slice().rchunks(chunk_size)
    }

    /// Returns an iterator over exactly-`chunk_size` chunks (remainder skipped).
    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
        self.as_slice().chunks_exact(chunk_size)
    }

    /// Removes and yields the elements in `range`; remaining tail elements
    /// are shifted down when the `Drain` is dropped.
    ///
    /// # Panics
    /// Panics when the range is invalid or out of bounds.
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T> {
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            index: range.start,
        }
    }

    /// Copies all elements into a contiguous `Vec`.
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        self.as_slice().to_vec()
    }
1138
    /// Number of shelves needed to hold `box_count` elements.
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        // Bias by the first-shelf size so shelf k covers a power-of-two range.
        let val = box_count + Self::MIN_NON_ZERO_CAP;
        val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
    }

    /// Capacity of shelf `shelf_index` (doubles per shelf).
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        Self::MIN_NON_ZERO_CAP << shelf_index
    }

    /// Maps a flat element index to `(shelf, offset-within-shelf)`.
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        // Biasing makes each shelf's index range a power-of-two block, so the
        // shelf is the MSB position and the offset is the remaining low bits.
        let biased = list_index + Self::MIN_NON_ZERO_CAP;
        let msb = biased.ilog2();
        let shelf = msb - Self::MIN_CAP_EXP;
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }

    /// Like [`location`](Self::location), but also returns how many slots
    /// remain in that shelf from the offset onward.
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::MIN_NON_ZERO_CAP;
        let msb = biased.ilog2();
        let shelf = msb - Self::MIN_CAP_EXP;
        let box_idx = biased ^ (1usize << msb);
        // `2 << msb` is the biased index one past this shelf's last slot.
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }

    /// Returns a reference to element `index` without bounds checking.
    ///
    /// # Safety
    /// `index` must be < `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            &*(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
        }
    }

    /// Returns a mutable reference to element `index` without bounds checking.
    ///
    /// # Safety
    /// `index` must be < `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            &mut *(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
        }
    }

    /// Moves element `index` out by bitwise copy, without bounds checking.
    ///
    /// # Safety
    /// `index` must be < `self.len`, and the caller must ensure the slot is
    /// not read or dropped again afterwards.
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            let (shelf, box_idx) = Self::location(index);
            std::ptr::read((*self.dynamic_segments.get_unchecked(shelf)).add(box_idx))
        }
    }
1222
    /// Allocates exactly one additional segment.
    ///
    /// # Panics
    /// Panics when the segment limit is reached, the layout overflows,
    /// or allocation fails.
    fn grow_once(&mut self) {
        assert!(
            self.segment_count < MAX_SEGMENTS,
            "Maximum segment count exceeded"
        );

        let size = Self::shelf_size(self.segment_count as u32);
        let layout = Layout::array::<T>(size).expect("Layout overflow");
        let ptr = if layout.size() == 0 {
            // ZSTs need no storage; a dangling, well-aligned pointer suffices.
            std::ptr::dangling_mut::<T>()
        } else {
            let ptr = unsafe { alloc::alloc(layout) };
            if ptr.is_null() {
                // NOTE(review): std convention is `alloc::handle_alloc_error`
                // here rather than panicking — consider switching.
                panic!("Allocation failed");
            }
            ptr as *mut T
        };
        self.dynamic_segments[self.segment_count] = ptr;
        self.segment_count += 1;
    }

    /// Allocates as many segments as needed to hold `new_capacity` elements.
    fn grow_capacity(&mut self, new_capacity: usize) {
        let new_shelf_count = Self::shelf_count(new_capacity) as usize;
        let old_shelf_count = self.segment_count;

        if new_shelf_count > old_shelf_count {
            assert!(
                new_shelf_count <= MAX_SEGMENTS,
                "Maximum segment count exceeded"
            );

            // NOTE(review): if an allocation panics mid-loop, the segments
            // allocated so far are recorded in `dynamic_segments` but not in
            // `segment_count`, so they would leak — confirm acceptable.
            for i in old_shelf_count..new_shelf_count {
                let size = Self::shelf_size(i as u32);
                let layout = Layout::array::<T>(size).expect("Layout overflow");
                let ptr = if layout.size() == 0 {
                    std::ptr::dangling_mut::<T>()
                } else {
                    let ptr = unsafe { alloc::alloc(layout) };
                    if ptr.is_null() {
                        panic!("Allocation failed");
                    }
                    ptr as *mut T
                };
                self.dynamic_segments[i] = ptr;
            }
            self.segment_count = new_shelf_count;
        }
    }

    /// Total capacity provided by the first `shelf_count` shelves.
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        // Geometric series: MIN * (2^count - 1).
        (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
    }

    /// Frees segments not needed for `new_capacity` elements.
    /// Does not touch the cached write cursor — callers must repair it.
    fn shrink_capacity(&mut self, new_capacity: usize) {
        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.segment_count as u32;

        if new_shelf_count < old_shelf_count {
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.segment_count = new_shelf_count as usize;
        }
    }

    /// Deallocates shelves `[to_count, from_count)`, highest first.
    /// Elements in them must already be dropped or moved out.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            if layout.size() > 0 {
                // SAFETY: the pointer was allocated with this exact layout.
                unsafe {
                    alloc::dealloc(self.dynamic_segments[i as usize] as *mut u8, layout);
                }
            }
            self.dynamic_segments[i as usize] = std::ptr::null_mut();
        }
    }
1303}
1304
impl<T> Drop for SegmentedVec<T> {
    /// Drops all elements, then releases every segment allocation.
    fn drop(&mut self) {
        self.clear();
        self.free_shelves(self.segment_count as u32, 0);
    }
}

/// Index-based element access used by the shared sort routines.
impl<T> sort::IndexedAccess<T> for SegmentedVec<T> {
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        // SAFETY: caller contract requires `index < len` (debug-asserted).
        unsafe { self.unchecked_at(index) }
    }

    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        let (shelf, box_idx) = Self::location(index);
        // SAFETY: `index < len` implies `shelf` is an allocated segment.
        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
    }

    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        let (shelf, box_idx) = Self::location(index);
        // SAFETY: `index < len` implies `shelf` is an allocated segment.
        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
    }

    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        // SAFETY: distinct in-bounds indices yield non-aliasing raw pointers.
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1348
impl<T> Default for SegmentedVec<T> {
    /// Equivalent to [`SegmentedVec::new`].
    fn default() -> Self {
        Self::new()
    }
}

impl<T> Index<usize> for SegmentedVec<T> {
    type Output = T;

    /// Panicking indexed access (use [`SegmentedVec::get`] for a checked variant).
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}

impl<T> IndexMut<usize> for SegmentedVec<T> {
    /// Panicking mutable indexed access.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}
1368
impl<T: Clone> Clone for SegmentedVec<T> {
    /// Deep-clones segment by segment into a freshly reserved vector.
    fn clone(&self) -> Self {
        if self.len == 0 {
            return Self::new();
        }

        let mut new_vec = Self::new();
        new_vec.reserve(self.len);

        // Both vectors share the same shelf layout, so clone shelf-for-shelf.
        let mut remaining = self.len;
        for shelf in 0..self.segment_count {
            let size = Self::shelf_size(shelf as u32);
            let count = remaining.min(size);
            let src = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
            let dst = unsafe { *new_vec.dynamic_segments.get_unchecked(shelf) };
            for i in 0..count {
                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
            }
            new_vec.len += count;
            remaining -= count;
            if remaining == 0 {
                break;
            }
        }

        // Seat the write cursor; null it out when the clone is exactly full.
        if new_vec.len < new_vec.capacity() {
            new_vec.init_write_ptr();
        } else {
            new_vec.write_ptr = std::ptr::null_mut();
            new_vec.segment_end = std::ptr::null_mut();
        }

        new_vec
    }
}

impl<T: std::fmt::Debug> std::fmt::Debug for SegmentedVec<T> {
    /// Formats like a standard list, e.g. `[1, 2, 3]`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
1411
1412impl<T: PartialEq> PartialEq for SegmentedVec<T> {
1413 fn eq(&self, other: &Self) -> bool {
1414 if self.len != other.len {
1415 return false;
1416 }
1417 for i in 0..self.len {
1418 if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1419 return false;
1420 }
1421 }
1422 true
1423 }
1424}
1425
1426impl<T: Eq> Eq for SegmentedVec<T> {}
1427
1428impl<T> FromIterator<T> for SegmentedVec<T> {
1429 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1430 let iter = iter.into_iter();
1431 let (lower, _) = iter.size_hint();
1432 let mut vec = Self::new();
1433 vec.reserve(lower);
1434 for item in iter {
1435 vec.push(item);
1436 }
1437 vec
1438 }
1439}
1440
1441impl<T> Extend<T> for SegmentedVec<T> {
1442 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1443 let iter = iter.into_iter();
1444 let (lower, _) = iter.size_hint();
1445 self.reserve(lower);
1446 for item in iter {
1447 self.push(item);
1448 }
1449 }
1450}
1451
/// Borrowing iterator over a [`SegmentedVec`], walking segment by segment.
pub struct Iter<'a, T> {
    vec: &'a SegmentedVec<T>,
    // Next element within the current segment (null before the first call).
    ptr: *const T,
    // One past the last initialized element of the current segment.
    segment_end: *const T,
    // Flat index of the next element to yield.
    index: usize,
    // Next shelf to enter when the current segment is exhausted.
    shelf_index: u32,
}

impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment.
        if self.ptr < self.segment_end {
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}

impl<'a, T> Iter<'a, T> {
    /// Advances to the next shelf and yields its first element,
    /// or `None` when the vector is exhausted.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        let shelf_idx = self.shelf_index as usize;
        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
        let ptr = self.vec.dynamic_segments[shelf_idx];
        // Only the initialized prefix of the final shelf is walkable.
        let segment_len = shelf_size.min(self.vec.len - self.index);
        self.ptr = ptr;
        self.segment_end = unsafe { ptr.add(segment_len) };
        self.shelf_index += 1;

        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}

impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
1511
/// Mutably borrowing iterator over a [`SegmentedVec`], walking segment by segment.
pub struct IterMut<'a, T> {
    vec: &'a mut SegmentedVec<T>,
    // Next element within the current segment (null before the first call).
    ptr: *mut T,
    // One past the last initialized element of the current segment.
    segment_end: *mut T,
    // Flat index of the next element to yield.
    index: usize,
    // Next shelf to enter when the current segment is exhausted.
    shelf_index: u32,
}

impl<'a, T> Iterator for IterMut<'a, T> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment. Each raw pointer is
        // yielded exactly once, so the &'a mut borrows never alias.
        if self.ptr < self.segment_end {
            let result = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(unsafe { &mut *result });
        }
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}

impl<'a, T> IterMut<'a, T> {
    /// Advances to the next shelf and yields its first element,
    /// or `None` when the vector is exhausted.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        let shelf_idx = self.shelf_index as usize;
        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
        let ptr = self.vec.dynamic_segments[shelf_idx];
        // Only the initialized prefix of the final shelf is walkable.
        let segment_len = shelf_size.min(self.vec.len - self.index);
        self.ptr = ptr;
        self.segment_end = unsafe { ptr.add(segment_len) };
        self.shelf_index += 1;

        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}

impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
1569
impl<T> IntoIterator for SegmentedVec<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Consumes the vector into an owning iterator.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: self,
            index: 0,
        }
    }
}

impl<'a, T> IntoIterator for &'a SegmentedVec<T> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut SegmentedVec<T> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

/// Owning iterator: moves elements out by index, front to back.
pub struct IntoIter<T> {
    vec: SegmentedVec<T>,
    // Index of the next element to move out; elements below it are vacated.
    index: usize,
}

impl<T> Iterator for IntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.vec.len {
            return None;
        }
        // SAFETY: `index < len` and each slot is read out exactly once;
        // `Drop` below only touches slots at `index` and beyond.
        let value = unsafe { self.vec.unchecked_read(self.index) };
        self.index += 1;
        Some(value)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.vec.len - self.index;
        (remaining, Some(remaining))
    }
}

impl<T> ExactSizeIterator for IntoIter<T> {}

impl<T> Drop for IntoIter<T> {
    /// Drops the not-yet-yielded elements, then zeroes `len` so the inner
    /// vector's own `Drop` only frees the segment allocations.
    fn drop(&mut self) {
        for i in self.index..self.vec.len {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }
        self.vec.len = 0;
    }
}
1638
/// Draining iterator returned by [`SegmentedVec::drain`]. Yields the range's
/// elements by value; on drop, the vector's tail is shifted down over the gap.
pub struct Drain<'a, T> {
    vec: &'a mut SegmentedVec<T>,
    // First index of the drained range (fixed).
    range_start: usize,
    // One past the last not-yet-consumed element of the range.
    range_end: usize,
    // Next index to yield from the front.
    index: usize,
}

impl<'a, T> Iterator for Drain<'a, T> {
    type Item = T;

    fn next(&mut self) -> Option<Self::Item> {
        if self.index >= self.range_end {
            None
        } else {
            // SAFETY: `index` is inside the drain range; each slot is moved
            // out exactly once and never dropped again by `Drop`.
            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
            self.index += 1;
            Some(value)
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.range_end - self.index;
        (remaining, Some(remaining))
    }
}
1667
1668impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1669 fn next_back(&mut self) -> Option<Self::Item> {
1670 if self.index >= self.range_end {
1671 None
1672 } else {
1673 self.range_end -= 1;
1674 Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1675 }
1676 }
1677}
1678
1679impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
1680
1681impl<'a, T> Drop for Drain<'a, T> {
1682 fn drop(&mut self) {
1683 for i in self.index..self.range_end {
1685 unsafe {
1686 std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1687 }
1688 }
1689
1690 let original_range_end = self.range_end;
1692 let original_len = self.vec.len;
1693 let drain_count = original_range_end - self.range_start;
1694
1695 for i in 0..(original_len - original_range_end) {
1696 unsafe {
1697 let src = self.vec.unchecked_at(original_range_end + i) as *const T;
1698 let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
1699 std::ptr::copy_nonoverlapping(src, dst, 1);
1700 }
1701 }
1702
1703 self.vec.len = original_len - drain_count;
1704 }
1705}
1706
1707#[cfg(test)]
1708mod tests {
1709 use super::*;
1710
1711 #[test]
1712 fn test_new_empty() {
1713 let vec: SegmentedVec<i32> = SegmentedVec::new();
1714 assert!(vec.is_empty());
1715 assert_eq!(vec.len(), 0);
1716 }
1717
1718 #[test]
1719 fn test_push_pop() {
1720 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1721 vec.push(1);
1722 vec.push(2);
1723 vec.push(3);
1724 assert_eq!(vec.len(), 3);
1725 assert_eq!(vec.pop(), Some(3));
1726 assert_eq!(vec.pop(), Some(2));
1727 assert_eq!(vec.pop(), Some(1));
1728 assert_eq!(vec.pop(), None);
1729 }
1730
1731 #[test]
1732 fn test_get() {
1733 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1734 vec.push(10);
1735 vec.push(20);
1736 vec.push(30);
1737 assert_eq!(vec.get(0), Some(&10));
1738 assert_eq!(vec.get(1), Some(&20));
1739 assert_eq!(vec.get(2), Some(&30));
1740 assert_eq!(vec.get(3), None);
1741 }
1742
1743 #[test]
1744 fn test_index() {
1745 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1746 vec.push(10);
1747 vec.push(20);
1748 assert_eq!(vec[0], 10);
1749 assert_eq!(vec[1], 20);
1750 vec[0] = 100;
1751 assert_eq!(vec[0], 100);
1752 }
1753
1754 #[test]
1755 fn test_stable_pointers() {
1756 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1757 vec.push(1);
1758 let ptr = &vec[0] as *const i32;
1759
1760 for i in 2..1000 {
1762 vec.push(i);
1763 }
1764
1765 assert_eq!(unsafe { *ptr }, 1);
1767 }
1768
1769 #[test]
1770 fn test_iter() {
1771 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1772 for i in 0..100 {
1773 vec.push(i);
1774 }
1775
1776 let collected: Vec<i32> = vec.iter().copied().collect();
1777 let expected: Vec<i32> = (0..100).collect();
1778 assert_eq!(collected, expected);
1779 }
1780
1781 #[test]
1782 fn test_iter_mut() {
1783 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1784 for i in 0..10 {
1785 vec.push(i);
1786 }
1787
1788 for item in vec.iter_mut() {
1789 *item *= 2;
1790 }
1791
1792 let collected: Vec<i32> = vec.iter().copied().collect();
1793 let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
1794 assert_eq!(collected, expected);
1795 }
1796
1797 #[test]
1798 fn test_into_iter() {
1799 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1800 for i in 0..10 {
1801 vec.push(i);
1802 }
1803
1804 let collected: Vec<i32> = vec.into_iter().collect();
1805 let expected: Vec<i32> = (0..10).collect();
1806 assert_eq!(collected, expected);
1807 }
1808
1809 #[test]
1810 fn test_from_iter() {
1811 let vec: SegmentedVec<i32> = (0..10).collect();
1812 assert_eq!(vec.len(), 10);
1813 for i in 0..10 {
1814 assert_eq!(vec[i], i as i32);
1815 }
1816 }
1817
1818 #[test]
1819 fn test_extend() {
1820 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1821 vec.extend(0..5);
1822 vec.extend(5..10);
1823 assert_eq!(vec.len(), 10);
1824 for i in 0..10 {
1825 assert_eq!(vec[i], i as i32);
1826 }
1827 }
1828
1829 #[test]
1830 fn test_clear() {
1831 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1832 vec.extend(0..10);
1833 vec.clear();
1834 assert!(vec.is_empty());
1835 assert_eq!(vec.len(), 0);
1836 }
1837
1838 #[test]
1839 fn test_truncate() {
1840 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1841 vec.extend(0..10);
1842 vec.truncate(5);
1843 assert_eq!(vec.len(), 5);
1844 for i in 0..5 {
1845 assert_eq!(vec[i], i as i32);
1846 }
1847 }
1848
1849 #[test]
1850 fn test_clone() {
1851 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1852 vec.extend(0..10);
1853 let vec2 = vec.clone();
1854 assert_eq!(vec, vec2);
1855 }
1856
1857 #[test]
1858 fn test_debug() {
1859 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1860 vec.extend(0..3);
1861 let debug_str = format!("{:?}", vec);
1862 assert_eq!(debug_str, "[0, 1, 2]");
1863 }
1864
1865 #[test]
1866 fn test_first_last() {
1867 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1868 assert_eq!(vec.first(), None);
1869 assert_eq!(vec.last(), None);
1870
1871 vec.push(1);
1872 assert_eq!(vec.first(), Some(&1));
1873 assert_eq!(vec.last(), Some(&1));
1874
1875 vec.push(2);
1876 vec.push(3);
1877 assert_eq!(vec.first(), Some(&1));
1878 assert_eq!(vec.last(), Some(&3));
1879 }
1880
1881 #[test]
1882 fn test_drop_elements() {
1883 use std::cell::Cell;
1884 use std::rc::Rc;
1885
1886 let drop_count = Rc::new(Cell::new(0));
1887
1888 struct DropCounter {
1889 counter: Rc<Cell<i32>>,
1890 }
1891
1892 impl Drop for DropCounter {
1893 fn drop(&mut self) {
1894 self.counter.set(self.counter.get() + 1);
1895 }
1896 }
1897
1898 {
1899 let mut vec: SegmentedVec<DropCounter> = SegmentedVec::new();
1900 for _ in 0..10 {
1901 vec.push(DropCounter {
1902 counter: Rc::clone(&drop_count),
1903 });
1904 }
1905 }
1906
1907 assert_eq!(drop_count.get(), 10);
1908 }
1909
1910 #[test]
1911 fn test_shrink_to_fit() {
1912 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1913 vec.extend(0..100);
1914 vec.truncate(5);
1915 vec.shrink_to_fit();
1916 assert_eq!(vec.len(), 5);
1917 for i in 0..5 {
1918 assert_eq!(vec[i], i as i32);
1919 }
1920 }
1921
1922 #[test]
1923 fn test_sort() {
1924 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1925 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
1926 vec.sort();
1927 let result: Vec<i32> = vec.iter().copied().collect();
1928 assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
1929 }
1930
1931 #[test]
1932 fn test_sort_empty() {
1933 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1934 vec.sort();
1935 assert!(vec.is_empty());
1936 }
1937
1938 #[test]
1939 fn test_sort_single() {
1940 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1941 vec.push(42);
1942 vec.sort();
1943 assert_eq!(vec[0], 42);
1944 }
1945
1946 #[test]
1947 fn test_sort_already_sorted() {
1948 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1949 vec.extend(0..10);
1950 vec.sort();
1951 let result: Vec<i32> = vec.iter().copied().collect();
1952 assert_eq!(result, (0..10).collect::<Vec<_>>());
1953 }
1954
1955 #[test]
1956 fn test_sort_reverse_sorted() {
1957 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1958 vec.extend((0..10).rev());
1959 vec.sort();
1960 let result: Vec<i32> = vec.iter().copied().collect();
1961 assert_eq!(result, (0..10).collect::<Vec<_>>());
1962 }
1963
1964 #[test]
1965 fn test_sort_by() {
1966 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1967 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
1968 vec.sort_by(|a, b| b.cmp(a)); let result: Vec<i32> = vec.iter().copied().collect();
1970 assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
1971 }
1972
1973 #[test]
1974 fn test_sort_by_key() {
1975 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1976 vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
1977 vec.sort_by_key(|k| k.abs());
1978 let result: Vec<i32> = vec.iter().copied().collect();
1979 assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
1980 }
1981
1982 #[test]
1983 fn test_sort_stable() {
1984 #[derive(Debug, Clone, Eq, PartialEq)]
1986 struct Item {
1987 key: i32,
1988 order: usize,
1989 }
1990
1991 let mut vec: SegmentedVec<Item> = SegmentedVec::new();
1992 vec.push(Item { key: 1, order: 0 });
1993 vec.push(Item { key: 2, order: 1 });
1994 vec.push(Item { key: 1, order: 2 });
1995 vec.push(Item { key: 2, order: 3 });
1996 vec.push(Item { key: 1, order: 4 });
1997
1998 vec.sort_by_key(|item| item.key);
1999
2000 assert_eq!(vec[0], Item { key: 1, order: 0 });
2002 assert_eq!(vec[1], Item { key: 1, order: 2 });
2003 assert_eq!(vec[2], Item { key: 1, order: 4 });
2004 assert_eq!(vec[3], Item { key: 2, order: 1 });
2006 assert_eq!(vec[4], Item { key: 2, order: 3 });
2007 }
2008
2009 #[test]
2010 fn test_sort_unstable() {
2011 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2012 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2013 vec.sort_unstable();
2014 let result: Vec<i32> = vec.iter().copied().collect();
2015 assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2016 }
2017
2018 #[test]
2019 fn test_sort_unstable_by() {
2020 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2021 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2022 vec.sort_unstable_by(|a, b| b.cmp(a)); let result: Vec<i32> = vec.iter().copied().collect();
2024 assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
2025 }
2026
2027 #[test]
2028 fn test_sort_unstable_by_key() {
2029 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2030 vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
2031 vec.sort_unstable_by_key(|k| k.abs());
2032 let result: Vec<i32> = vec.iter().copied().collect();
2034 for i in 1..result.len() {
2035 assert!(result[i - 1].abs() <= result[i].abs());
2036 }
2037 }
2038
2039 #[test]
2040 fn test_sort_large() {
2041 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2042 vec.extend((0..1000).rev());
2044 vec.sort();
2045 for i in 0..1000 {
2046 assert_eq!(vec[i], i as i32);
2047 }
2048 }
2049
2050 #[test]
2051 fn test_sort_unstable_large() {
2052 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2053 vec.extend((0..1000).rev());
2055 vec.sort_unstable();
2056 for i in 0..1000 {
2057 assert_eq!(vec[i], i as i32);
2058 }
2059 }
2060
2061 #[test]
2062 fn test_sort_pointers_remain_stable_after_sort() {
2063 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2066 vec.extend([5, 2, 8, 1, 9]);
2067
2068 let ptr = &vec[0] as *const i32;
2070
2071 vec.sort();
2072
2073 assert_eq!(unsafe { *ptr }, 1); }
2076
2077 #[test]
2080 fn test_as_slice() {
2081 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2082 vec.extend(0..10);
2083
2084 let slice = vec.as_slice();
2085 assert_eq!(slice.len(), 10);
2086 assert_eq!(slice[0], 0);
2087 assert_eq!(slice[9], 9);
2088 }
2089
2090 #[test]
2091 fn test_as_mut_slice() {
2092 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2093 vec.extend(0..10);
2094
2095 {
2096 let mut slice = vec.as_mut_slice();
2097 slice[0] = 100;
2098 slice[9] = 200;
2099 }
2100
2101 assert_eq!(vec[0], 100);
2102 assert_eq!(vec[9], 200);
2103 }
2104
2105 #[test]
2106 fn test_slice_range() {
2107 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2108 vec.extend(0..10);
2109
2110 let slice = vec.slice(2..5);
2111 assert_eq!(slice.len(), 3);
2112 assert_eq!(slice[0], 2);
2113 assert_eq!(slice[1], 3);
2114 assert_eq!(slice[2], 4);
2115 }
2116
2117 #[test]
2118 fn test_slice_mut_range() {
2119 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2120 vec.extend(0..10);
2121
2122 {
2123 let mut slice = vec.slice_mut(2..5);
2124 slice[0] = 100;
2125 slice[2] = 200;
2126 }
2127
2128 assert_eq!(vec[2], 100);
2129 assert_eq!(vec[4], 200);
2130 }
2131
2132 #[test]
2133 fn test_slice_first_last() {
2134 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2135 vec.extend(0..10);
2136
2137 let slice = vec.as_slice();
2138 assert_eq!(slice.first(), Some(&0));
2139 assert_eq!(slice.last(), Some(&9));
2140 }
2141
2142 #[test]
2143 fn test_slice_iter() {
2144 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2145 vec.extend(0..10);
2146
2147 let slice = vec.as_slice();
2148 let collected: Vec<i32> = slice.iter().copied().collect();
2149 assert_eq!(collected, (0..10).collect::<Vec<_>>());
2150 }
2151
2152 #[test]
2153 fn test_slice_iter_rev() {
2154 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2155 vec.extend(0..10);
2156
2157 let slice = vec.as_slice();
2158 let collected: Vec<i32> = slice.iter().rev().copied().collect();
2159 assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
2160 }
2161
2162 #[test]
2163 fn test_slice_contains() {
2164 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2165 vec.extend(0..10);
2166
2167 let slice = vec.as_slice();
2168 assert!(slice.contains(&5));
2169 assert!(!slice.contains(&100));
2170 }
2171
2172 #[test]
2173 fn test_slice_binary_search() {
2174 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2175 vec.extend(0..100);
2176
2177 let slice = vec.as_slice();
2178 assert_eq!(slice.binary_search(&50), Ok(50));
2179 assert_eq!(slice.binary_search(&0), Ok(0));
2180 assert_eq!(slice.binary_search(&99), Ok(99));
2181 assert_eq!(slice.binary_search(&100), Err(100));
2182 }
2183
2184 #[test]
2185 fn test_slice_split_at() {
2186 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2187 vec.extend(0..10);
2188
2189 let slice = vec.as_slice();
2190 let (left, right) = slice.split_at(5);
2191 assert_eq!(left.len(), 5);
2192 assert_eq!(right.len(), 5);
2193 assert_eq!(left[0], 0);
2194 assert_eq!(right[0], 5);
2195 }
2196
2197 #[test]
2198 fn test_slice_to_vec() {
2199 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2200 vec.extend(0..10);
2201
2202 let slice = vec.as_slice();
2203 let converted = slice.to_vec();
2204 assert_eq!(converted, (0..10).collect::<Vec<_>>());
2205 }
2206
2207 #[test]
2208 fn test_slice_mut_sort() {
2209 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2210 vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);
2211
2212 {
2214 let mut slice = vec.slice_mut(2..8);
2215 slice.sort();
2216 }
2217
2218 let result: Vec<i32> = vec.iter().copied().collect();
2219 assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
2220 }
2221
2222 #[test]
2223 fn test_slice_mut_reverse() {
2224 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2225 vec.extend(0..10);
2226
2227 {
2228 let mut slice = vec.slice_mut(2..8);
2229 slice.reverse();
2230 }
2231
2232 let result: Vec<i32> = vec.iter().copied().collect();
2233 assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
2234 }
2235
2236 #[test]
2237 fn test_slice_mut_fill() {
2238 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2239 vec.extend(0..10);
2240
2241 {
2242 let mut slice = vec.slice_mut(2..5);
2243 slice.fill(99);
2244 }
2245
2246 let result: Vec<i32> = vec.iter().copied().collect();
2247 assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
2248 }
2249
2250 #[test]
2251 fn test_slice_starts_with() {
2252 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2253 vec.extend(0..10);
2254
2255 let slice = vec.as_slice();
2256 assert!(slice.starts_with(&[0, 1, 2]));
2257 assert!(!slice.starts_with(&[1, 2, 3]));
2258 }
2259
2260 #[test]
2261 fn test_slice_ends_with() {
2262 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2263 vec.extend(0..10);
2264
2265 let slice = vec.as_slice();
2266 assert!(slice.ends_with(&[7, 8, 9]));
2267 assert!(!slice.ends_with(&[6, 7, 8]));
2268 }
2269
2270 #[test]
2271 fn test_slice_eq() {
2272 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2273 vec.extend(0..10);
2274
2275 let slice = vec.as_slice();
2276 assert_eq!(slice, (0..10).collect::<Vec<_>>());
2277 }
2278
2279 #[test]
2280 fn test_min_non_zero_cap() {
2281 let mut vec_u8: SegmentedVec<u8> = SegmentedVec::new();
2283 vec_u8.push(1);
2284 assert_eq!(vec_u8.capacity(), 8);
2285
2286 let mut vec_i32: SegmentedVec<i32> = SegmentedVec::new();
2288 vec_i32.push(1);
2289 assert_eq!(vec_i32.capacity(), 4);
2290
2291 for i in 0u8..100 {
2293 vec_u8.push(i);
2294 }
2295 for i in 0..101 {
2296 assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
2297 }
2298 }
2299
2300 #[test]
2301 fn test_extend_from_copy_slice() {
2302 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2303 let data: Vec<i32> = (0..100).collect();
2304 vec.extend_from_copy_slice(&data);
2305 assert_eq!(vec.len(), 100);
2306 for i in 0..100 {
2307 assert_eq!(vec[i], i as i32);
2308 }
2309
2310 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2311 vec.push(999);
2312 vec.push(998);
2313 vec.extend_from_copy_slice(&data[..10]);
2314 assert_eq!(vec.len(), 12);
2315 assert_eq!(vec[0], 999);
2316 assert_eq!(vec[1], 998);
2317 for i in 0..10 {
2318 assert_eq!(vec[i + 2], i as i32);
2319 }
2320
2321 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2322 vec.extend_from_copy_slice(&[]);
2323 assert!(vec.is_empty());
2324
2325 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2326 vec.extend_from_copy_slice(&[1, 2, 3]); assert_eq!(vec.len(), 3);
2328 vec.push(4); vec.push(5);
2330 vec.push(6);
2331 assert_eq!(vec.len(), 6);
2332 for i in 0..6 {
2333 assert_eq!(vec[i], (i + 1) as i32);
2334 }
2335 }
2336}