1mod slice;
28mod sort;
29
30pub use slice::{
31 Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32 Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::ops::{Index, IndexMut};
39
40const MAX_SEGMENTS: usize = 64;
43
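/// A growable vector that stores its elements in up to `MAX_SEGMENTS`
/// separately allocated segments ("shelves") whose sizes double, starting at
/// `MIN_NON_ZERO_CAP`. Growing only allocates a new shelf; existing shelves
/// are never reallocated or moved, so references to elements stay valid
/// across `push` (see `test_stable_pointers`).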
44#[repr(C)]
pub struct SegmentedVec<T> {
    // Write cursor: where the next push stores its value (null until a shelf is in use).
    write_ptr: *mut T,
    // One past the end of the shelf that `write_ptr` points into.
    segment_end: *mut T,
    // Number of live elements.
    len: usize,
    // Index of the shelf the write cursor points into (`usize::MAX` before any shelf is in use).
    active_segment_index: usize,
    // Number of shelves allocated so far.
    segment_count: usize,
    // Base pointers of the shelves; shelf `i` holds `MIN_NON_ZERO_CAP << i` slots.
    dynamic_segments: [*mut T; MAX_SEGMENTS],
    _marker: PhantomData<T>,
}
58
59impl<T> SegmentedVec<T> {
60 const MIN_NON_ZERO_CAP: usize = {
66 let size = std::mem::size_of::<T>();
67 if size == 1 {
68 8
69 } else if size <= 1024 {
70 4
71 } else {
72 1
73 }
74 };
75
76 const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();
77
78 #[inline]
88 pub const fn new() -> Self {
89 Self {
90 dynamic_segments: [std::ptr::null_mut(); MAX_SEGMENTS],
91 segment_count: 0,
92 len: 0,
93 write_ptr: std::ptr::null_mut(),
94 segment_end: std::ptr::null_mut(),
95 active_segment_index: usize::MAX,
97 _marker: PhantomData,
98 }
99 }
100
101 #[inline]
103 pub const fn len(&self) -> usize {
104 self.len
105 }
106
107 #[inline]
109 pub const fn is_empty(&self) -> bool {
110 self.len == 0
111 }
112
113 #[inline]
115 pub fn capacity(&self) -> usize {
116 Self::compute_capacity(self.segment_count as u32)
117 }
118
119 #[inline]
135 pub fn push(&mut self, value: T) {
136 if self.write_ptr < self.segment_end {
138 unsafe {
139 std::ptr::write(self.write_ptr, value);
140 self.write_ptr = self.write_ptr.add(1);
141 }
142 self.len += 1;
143 return;
144 }
145
146 self.push_slow(value);
148 }
149
150 #[cold]
151 #[inline(never)]
152 fn push_slow(&mut self, value: T) {
153 self.active_segment_index = self.active_segment_index.wrapping_add(1);
154
155 if self.active_segment_index >= self.segment_count {
156 self.grow_once();
157 }
158
159 let idx = self.active_segment_index;
160
161 let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
164 let shelf_size = Self::MIN_NON_ZERO_CAP << idx;
165
166 unsafe {
167 std::ptr::write(base, value);
168 self.write_ptr = base.add(1);
169 self.segment_end = base.add(shelf_size);
170 }
171 self.len += 1;
172 }
173
174 #[inline]
188 pub fn pop(&mut self) -> Option<T> {
189 if self.len == 0 {
190 return None;
191 }
192
193 let segment_base = unsafe {
196 *self
197 .dynamic_segments
198 .get_unchecked(self.active_segment_index)
199 };
200 if self.write_ptr > segment_base {
201 self.len -= 1;
202 unsafe {
203 self.write_ptr = self.write_ptr.sub(1);
205 Some(std::ptr::read(self.write_ptr))
207 }
208 } else {
209 self.pop_slow_path()
211 }
212 }
213
214 #[cold]
215 #[inline(never)]
216 fn pop_slow_path(&mut self) -> Option<T> {
217 self.active_segment_index -= 1;
220 let idx = self.active_segment_index;
221
222 let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
223 let capacity = Self::MIN_NON_ZERO_CAP << idx;
224
225 self.segment_end = unsafe { base.add(capacity) };
226 self.write_ptr = unsafe { self.segment_end.sub(1) };
227 self.len -= 1;
228 Some(unsafe { std::ptr::read(self.write_ptr) })
229 }
230
231 #[inline]
235 pub fn get(&self, index: usize) -> Option<&T> {
236 if index < self.len {
237 Some(unsafe { self.unchecked_at(index) })
238 } else {
239 None
240 }
241 }
242
243 #[inline]
247 pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
248 if index < self.len {
249 Some(unsafe { self.unchecked_at_mut(index) })
250 } else {
251 None
252 }
253 }
254
255 #[inline]
257 pub fn first(&self) -> Option<&T> {
258 if self.len == 0 {
259 None
260 } else {
261 Some(unsafe { &*self.dynamic_segments[0] })
263 }
264 }
265
266 #[inline]
268 pub fn first_mut(&mut self) -> Option<&mut T> {
269 if self.len == 0 {
270 None
271 } else {
272 Some(unsafe { &mut *self.dynamic_segments[0] })
274 }
275 }
276
277 #[inline]
279 pub fn last(&self) -> Option<&T> {
280 if self.len == 0 {
281 return None;
282 }
283
284 let segment_base = unsafe {
287 *self
288 .dynamic_segments
289 .get_unchecked(self.active_segment_index)
290 };
291
292 if self.write_ptr > segment_base {
293 Some(unsafe { &*self.write_ptr.sub(1) })
295 } else {
296 self.get(self.len - 1)
298 }
299 }
300
301 #[inline]
303 pub fn last_mut(&mut self) -> Option<&mut T> {
304 if self.len == 0 {
305 return None;
306 }
307
308 let segment_base = unsafe {
311 *self
312 .dynamic_segments
313 .get_unchecked(self.active_segment_index)
314 };
315
316 if self.write_ptr > segment_base {
317 Some(unsafe { &mut *self.write_ptr.sub(1) })
319 } else {
320 self.get_mut(self.len - 1)
322 }
323 }
324
325 #[inline]
330 pub fn contains(&self, x: &T) -> bool
331 where
332 T: PartialEq,
333 {
334 let mut remaining = self.len;
335 let mut segment_idx = 0;
336
337 while remaining > 0 {
338 let segment_capacity = Self::MIN_NON_ZERO_CAP << segment_idx;
339 let segment_len = segment_capacity.min(remaining);
340
341 let base = unsafe { *self.dynamic_segments.get_unchecked(segment_idx) };
343 let slice = unsafe { std::slice::from_raw_parts(base, segment_len) };
344
345 if slice.contains(x) {
346 return true;
347 }
348
349 segment_idx += 1;
350 remaining -= segment_len;
351 }
352
353 false
354 }
355
356 pub fn starts_with(&self, needle: &[T]) -> bool
358 where
359 T: PartialEq,
360 {
361 if needle.len() > self.len {
362 return false;
363 }
364
365 if needle.is_empty() {
366 return true;
367 }
368
369 let mut current_segment_capacity = Self::MIN_NON_ZERO_CAP;
370 let mut needle_cursor = 0;
371 let mut idx = 0;
372
373 while needle_cursor < needle.len() {
374 unsafe {
375 let segment_base = *self.dynamic_segments.get_unchecked(idx);
376
377 let remaining = needle.len() - needle_cursor;
378 let match_len = remaining.min(current_segment_capacity);
379
380 let haystack_slice = std::slice::from_raw_parts(segment_base, match_len);
381
382 let needle_slice = &needle[needle_cursor..needle_cursor + match_len];
383
384 if haystack_slice != needle_slice {
385 return false;
386 }
387
388 needle_cursor += match_len;
389 idx += 1;
390 current_segment_capacity <<= 1;
391 }
392 }
393
394 true
395 }
396
397 pub fn ends_with(&self, needle: &[T]) -> bool
399 where
400 T: PartialEq,
401 {
402 if needle.len() > self.len {
403 return false;
404 }
405
406 if needle.is_empty() {
407 return true;
408 }
409
410 let mut remaining = needle.len();
411 let mut idx = self.active_segment_index;
412
413 unsafe {
415 let segment_base = *self.dynamic_segments.get_unchecked(idx);
416
417 let segment_len = self.write_ptr.offset_from(segment_base) as usize;
419 let match_len = remaining.min(segment_len);
420
421 let haystack_start = self.write_ptr.sub(match_len);
422 let haystack_slice = std::slice::from_raw_parts(haystack_start, match_len);
423
424 let needle_start = remaining - match_len;
425 let needle_slice = &needle[needle_start..remaining];
426
427 if haystack_slice != needle_slice {
428 return false;
429 }
430
431 remaining -= match_len;
432 if remaining == 0 {
433 return true;
434 }
435
436 idx -= 1;
437 }
438
439 loop {
441 unsafe {
442 let segment_base = *self.dynamic_segments.get_unchecked(idx);
443
444 let segment_capacity = Self::MIN_NON_ZERO_CAP << idx;
445 let match_len = remaining.min(segment_capacity);
446
447 let haystack_start = segment_base.add(segment_capacity - match_len);
448 let haystack_slice = std::slice::from_raw_parts(haystack_start, match_len);
449
450 let needle_start = remaining - match_len;
451 let needle_slice = &needle[needle_start..remaining];
452
453 if haystack_slice != needle_slice {
454 return false;
455 }
456
457 remaining -= match_len;
458 if remaining == 0 {
459 return true;
460 }
461
462 idx -= 1;
463 }
464 }
465 }
466
467 pub fn binary_search(&self, x: &T) -> Result<usize, usize>
472 where
473 T: Ord,
474 {
475 self.binary_search_by(|p| p.cmp(x))
476 }
477
478 pub fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize>
480 where
481 F: FnMut(&T) -> Ordering,
482 {
483 if self.len == 0 {
484 return Err(0);
485 }
486
487 let active_capacity = Self::MIN_NON_ZERO_CAP << self.active_segment_index;
490
491 let segment_base = unsafe { self.segment_end.sub(active_capacity) };
493 let first_elem = unsafe { &*segment_base };
494
495 if f(first_elem) != Ordering::Greater {
498 unsafe {
499 let segment_len = self.write_ptr.offset_from(segment_base) as usize;
501 let slice = std::slice::from_raw_parts(segment_base, segment_len);
502
503 let offset = active_capacity - Self::MIN_NON_ZERO_CAP;
504
505 return match slice.binary_search_by(f) {
506 Ok(idx) => Ok(offset + idx),
507 Err(idx) => Err(offset + idx),
508 };
509 }
510 }
511
512 let mut idx = self.active_segment_index;
516 while idx > 0 {
517 idx -= 1;
518 unsafe {
519 let segment_base = *self.dynamic_segments.get_unchecked(idx);
520 if f(&*segment_base) != Ordering::Greater {
521 let capacity = Self::MIN_NON_ZERO_CAP << idx;
522 let slice = std::slice::from_raw_parts(segment_base, capacity);
523
524 let global_offset = capacity - Self::MIN_NON_ZERO_CAP;
525
526 return match slice.binary_search_by(f) {
527 Ok(i) => Ok(global_offset + i),
528 Err(i) => Err(global_offset + i),
529 };
530 }
531 }
532 }
533
534 Err(0)
535 }
536
537 pub fn binary_search_by_key<B, F>(&self, b: &B, mut f: F) -> Result<usize, usize>
539 where
540 F: FnMut(&T) -> B,
541 B: Ord,
542 {
543 self.binary_search_by(|k| f(k).cmp(b))
544 }
545
546 #[inline]
552 pub fn swap(&mut self, a: usize, b: usize) {
        let ptr_a = &raw mut self[a];
        let ptr_b = &raw mut self[b];
        unsafe {
            std::ptr::swap(ptr_a, ptr_b);
559 }
560 }
561
562 pub fn reverse(&mut self) {
564 if self.len < 2 {
565 return;
566 }
567
568 let mut remaining_total = self.len;
569
570 let mut f_segment_index = 0;
572 let mut f_segment_capacity = Self::MIN_NON_ZERO_CAP;
573 let mut f_segment_base = unsafe { *self.dynamic_segments.get_unchecked(0) };
574 let mut f_remaining = f_segment_capacity;
575
576 let mut b_segment_index = self.active_segment_index;
578 let mut b_segment_capacity = Self::MIN_NON_ZERO_CAP << b_segment_index;
579 let mut b_segment_base = unsafe { *self.dynamic_segments.get_unchecked(b_segment_index) };
580 let mut b_remaining = unsafe { self.write_ptr.offset_from(b_segment_base) as usize };
581 let mut b_segment_end = unsafe { self.write_ptr.sub(1) };
582
583 loop {
584 if f_segment_index == b_segment_index {
586 unsafe {
587 let slice = std::slice::from_raw_parts_mut(f_segment_base, remaining_total);
588 slice.reverse();
589 }
590 return;
591 }
592
593 let count = f_remaining.min(b_remaining);
594
595 unsafe {
596 self.swap_chunks_reversed_simd(f_segment_base, b_segment_end, count);
597
598 f_segment_base = f_segment_base.add(count);
599 b_segment_end = b_segment_end.sub(count);
600 }
601
602 remaining_total -= count * 2;
603 if remaining_total == 0 {
604 return;
605 }
606
607 f_remaining -= count;
609 if f_remaining == 0 {
610 f_segment_index += 1;
611 f_segment_capacity <<= 1;
612 f_remaining = f_segment_capacity;
613 unsafe {
615 f_segment_base = *self.dynamic_segments.get_unchecked(f_segment_index);
616 }
617 }
618
619 b_remaining -= count;
621 if b_remaining == 0 {
622 b_segment_index -= 1;
623 b_segment_capacity >>= 1;
624 b_remaining = b_segment_capacity;
625 unsafe {
626 b_segment_base = *self.dynamic_segments.get_unchecked(b_segment_index);
627 b_segment_end = b_segment_base.add(b_segment_capacity - 1);
628 }
629 }
630 }
631 }
632
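    // Swaps `count` elements starting at `base_a` (walking forwards) with the
    // `count` elements ending at `base_b` (walking backwards); used by
    // `reverse` to exchange a prefix run with a suffix run.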
633 #[inline(always)]
635 unsafe fn swap_chunks_reversed_simd(&mut self, base_a: *mut T, base_b: *mut T, count: usize) {
636 for i in 0..count {
638 std::ptr::swap(base_a.add(i), base_b.sub(i));
639 }
640 }
641
642 pub fn fill(&mut self, value: T)
644 where
645 T: Clone,
646 {
647 self.as_mut_slice().fill(value)
648 }
649
650 pub fn fill_with<F>(&mut self, f: F)
652 where
653 F: FnMut() -> T,
654 {
655 self.as_mut_slice().fill_with(f)
656 }
657
658 pub fn clear(&mut self) {
662 if self.len == 0 {
663 return;
664 }
665
666 if std::mem::needs_drop::<T>() {
667 let mut remaining = self.len;
669 for shelf in 0..self.segment_count {
670 let size = Self::shelf_size(shelf as u32);
671 let count = remaining.min(size);
672 let ptr = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
673 unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
674 remaining -= count;
675 if remaining == 0 {
676 break;
677 }
678 }
679 }
680
681 self.len = 0;
682 self.write_ptr = std::ptr::null_mut();
684 self.segment_end = std::ptr::null_mut();
685 self.active_segment_index = usize::MAX;
686 }
687
688 pub fn truncate(&mut self, new_len: usize) {
692 if new_len >= self.len {
693 return;
694 }
695
696 if std::mem::needs_drop::<T>() {
697 let dynamic_new_len = new_len;
698 let dynamic_old_len = self.len;
699
700 if dynamic_new_len < dynamic_old_len {
701 let mut pos = 0usize;
702 for shelf in 0..self.segment_count {
703 let size = Self::shelf_size(shelf as u32);
704 let seg_end = pos + size;
705
706 let drop_start = dynamic_new_len.max(pos);
708 let drop_end = dynamic_old_len.min(seg_end);
709
710 if drop_start < drop_end {
711 let offset = drop_start - pos;
712 let count = drop_end - drop_start;
713 let ptr =
714 unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(offset) };
715 unsafe {
716 std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
717 };
718 }
719
720 pos = seg_end;
721 if pos >= dynamic_old_len {
722 break;
723 }
724 }
725 }
726 }
727
728 self.len = new_len;
        if new_len > 0 {
            // Recompute the cached write cursor from the last remaining
            // element, so the active shelf is always one that still holds
            // live data (important when `new_len` exactly fills its shelves).
            let biased = new_len - 1 + Self::MIN_NON_ZERO_CAP;
            let msb = biased.ilog2();
            let idx = (msb - Self::MIN_CAP_EXP) as usize;
            self.active_segment_index = idx;

            let capacity = 1usize << msb;
            let offset = biased ^ capacity;
            unsafe {
                let base = *self.dynamic_segments.get_unchecked(idx);
                self.write_ptr = base.add(offset + 1);
                self.segment_end = base.add(capacity);
            }
743 } else {
744 self.write_ptr = std::ptr::null_mut();
746 self.segment_end = std::ptr::null_mut();
747 self.active_segment_index = usize::MAX;
749 }
750 }
751
    pub fn reserve(&mut self, additional: usize) {
        self.grow_capacity(self.len + additional);
        // Prime the write cursor only if a first shelf actually exists;
        // `reserve(0)` on an empty vector must leave the empty state intact.
        if self.write_ptr.is_null() && self.segment_count > 0 {
            unsafe {
                let base = *self.dynamic_segments.get_unchecked(0);
                self.write_ptr = base;
                self.segment_end = base.add(Self::MIN_NON_ZERO_CAP);
                self.active_segment_index = 0;
            }
        }
    }
771
772 pub fn shrink_to_fit(&mut self) {
776 self.shrink_capacity(self.len);
777 }
778
779 #[inline]
781 pub fn iter(&self) -> Iter<'_, T> {
782 Iter {
784 vec: self,
785 ptr: std::ptr::null(),
786 segment_end: std::ptr::null(),
787 index: 0,
788 shelf_index: 0,
789 }
790 }
791
792 #[inline]
794 pub fn iter_mut(&mut self) -> IterMut<'_, T> {
795 IterMut {
797 vec: self,
798 ptr: std::ptr::null_mut(),
799 segment_end: std::ptr::null_mut(),
800 index: 0,
801 shelf_index: 0,
802 }
803 }
804
805 #[inline]
820 pub fn as_slice(&self) -> SegmentedSlice<'_, T> {
821 SegmentedSlice::new(self)
822 }
823
824 #[inline]
839 pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T> {
840 SegmentedSliceMut::new(self)
841 }
842
843 #[inline]
862 pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T> {
863 assert!(range.start <= range.end && range.end <= self.len);
864 SegmentedSlice::from_range(self, range.start, range.end)
865 }
866
867 #[inline]
886 pub fn slice_mut(&mut self, range: std::ops::Range<usize>) -> SegmentedSliceMut<'_, T> {
887 assert!(range.start <= range.end && range.end <= self.len);
888 SegmentedSliceMut::from_range(self, range.start, range.end)
889 }
890
891 pub fn extend_from_slice(&mut self, other: &[T])
893 where
894 T: Clone,
895 {
896 if other.is_empty() {
897 return;
898 }
899 self.reserve(other.len());
900
901 let mut src = other;
902
903 while !src.is_empty() {
905 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
906 let to_copy = src.len().min(remaining);
907 let dest = unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) };
908 for (i, item) in src.iter().take(to_copy).enumerate() {
909 unsafe { std::ptr::write(dest.add(i), item.clone()) };
910 }
911 self.len += to_copy;
912 src = &src[to_copy..];
913 }
914
        if self.len > 0 {
            // Recompute the cached write cursor from the last element so the
            // active shelf is always one that was actually allocated (the new
            // length may exactly fill the reserved shelves).
            let biased = self.len - 1 + Self::MIN_NON_ZERO_CAP;
            let msb = biased.ilog2();
            let idx = (msb - Self::MIN_CAP_EXP) as usize;
            self.active_segment_index = idx;

            let capacity = 1usize << msb;
            let offset = biased ^ capacity;
            unsafe {
                let base = *self.dynamic_segments.get_unchecked(idx);
                self.write_ptr = base.add(offset + 1);
                self.segment_end = base.add(capacity);
            }
929 }
930 }
931
932 pub fn extend_from_copy_slice(&mut self, other: &[T])
937 where
938 T: Copy,
939 {
940 if other.is_empty() {
941 return;
942 }
943 self.reserve(other.len());
944
945 let mut src = other;
946
947 while !src.is_empty() {
949 let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
950 let to_copy = src.len().min(remaining);
951 unsafe {
952 let dest = (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx);
953 std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
954 };
955 self.len += to_copy;
956 src = &src[to_copy..];
957 }
958
        if self.len > 0 {
            // Recompute the cached write cursor from the last element so the
            // active shelf is always one that was actually allocated (the new
            // length may exactly fill the reserved shelves).
            let biased = self.len - 1 + Self::MIN_NON_ZERO_CAP;
            let msb = biased.ilog2();
            let idx = (msb - Self::MIN_CAP_EXP) as usize;
            self.active_segment_index = idx;

            let capacity = 1usize << msb;
            let offset = biased ^ capacity;
            unsafe {
                let base = *self.dynamic_segments.get_unchecked(idx);
                self.write_ptr = base.add(offset + 1);
                self.segment_end = base.add(capacity);
            }
973 }
974 }
975
976 #[inline]
993 pub fn sort(&mut self)
994 where
995 T: Ord,
996 {
997 self.as_mut_slice().sort();
998 }
999
1000 pub fn sort_by<F>(&mut self, compare: F)
1017 where
1018 F: FnMut(&T, &T) -> Ordering,
1019 {
1020 self.as_mut_slice().sort_by(compare);
1021 }
1022
1023 #[inline]
1039 pub fn sort_by_key<K, F>(&mut self, f: F)
1040 where
1041 F: FnMut(&T) -> K,
1042 K: Ord,
1043 {
1044 self.as_mut_slice().sort_by_key(f);
1045 }
1046
1047 #[inline]
1064 pub fn sort_unstable(&mut self)
1065 where
1066 T: Ord,
1067 {
1068 self.as_mut_slice().sort_unstable();
1069 }
1070
1071 pub fn sort_unstable_by<F>(&mut self, compare: F)
1088 where
1089 F: FnMut(&T, &T) -> Ordering,
1090 {
1091 self.as_mut_slice().sort_unstable_by(compare);
1092 }
1093
1094 #[inline]
1114 pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
1115 where
1116 F: FnMut(&T) -> K,
1117 K: Ord,
1118 {
1119 self.as_mut_slice().sort_unstable_by_key(f);
1120 }
1121
1122 pub fn is_sorted(&self) -> bool
1124 where
1125 T: PartialOrd,
1126 {
1127 self.as_slice().is_sorted()
1128 }
1129
1130 pub fn is_sorted_by<F>(&self, compare: F) -> bool
1132 where
1133 F: FnMut(&T, &T) -> bool,
1134 {
1135 self.as_slice().is_sorted_by(compare)
1136 }
1137
1138 pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
1140 where
1141 F: FnMut(&T) -> K,
1142 K: PartialOrd,
1143 {
1144 self.as_slice().is_sorted_by_key(f)
1145 }
1146
1147 pub fn partition_point<P>(&self, pred: P) -> usize
1149 where
1150 P: FnMut(&T) -> bool,
1151 {
1152 self.as_slice().partition_point(pred)
1153 }
1154
1155 pub fn rotate_left(&mut self, mid: usize) {
1157 self.as_mut_slice().rotate_left(mid);
1158 }
1159
1160 pub fn rotate_right(&mut self, k: usize) {
1162 self.as_mut_slice().rotate_right(k);
1163 }
1164
1165 pub fn with_capacity(capacity: usize) -> Self {
1167 let mut vec = Self::new();
1168 vec.reserve(capacity);
1169 vec
1170 }
1171
1172 pub fn insert(&mut self, index: usize, element: T) {
1178 assert!(index <= self.len);
1179 self.push(element);
1180 if index < self.len - 1 {
1182 for i in (index..self.len - 1).rev() {
1183 unsafe {
1184 let ptr_a = self.unchecked_at_mut(i) as *mut T;
1185 let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
1186 std::ptr::swap(ptr_a, ptr_b);
1187 }
1188 }
1189 }
1190 }
1191
1192 pub fn remove(&mut self, index: usize) -> T {
1198 assert!(index < self.len);
1199 for i in index..self.len - 1 {
1201 unsafe {
1202 let ptr_a = self.unchecked_at_mut(i) as *mut T;
1203 let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
1204 std::ptr::swap(ptr_a, ptr_b);
1205 }
1206 }
1207 self.pop().unwrap()
1208 }
1209
1210 pub fn swap_remove(&mut self, index: usize) -> T {
1219 assert!(index < self.len);
1220 let last_index = self.len - 1;
1221 if index != last_index {
1222 unsafe {
1223 let ptr_a = self.unchecked_at_mut(index) as *mut T;
1224 let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
1228 std::ptr::swap(ptr_a, ptr_b);
1229 }
1230 }
1231 self.pop().unwrap()
1232 }
1233
1234 pub fn retain<F>(&mut self, mut f: F)
1238 where
1239 F: FnMut(&T) -> bool,
1240 {
1241 let mut i = 0;
1242 while i < self.len {
1243 if f(unsafe { self.unchecked_at(i) }) {
1244 i += 1;
1245 } else {
1246 self.remove(i);
1247 }
1248 }
1249 }
1250
1251 pub fn retain_mut<F>(&mut self, mut f: F)
1253 where
1254 F: FnMut(&mut T) -> bool,
1255 {
1256 let mut i = 0;
1257 while i < self.len {
1258 if f(unsafe { self.unchecked_at_mut(i) }) {
1259 i += 1;
1260 } else {
1261 self.remove(i);
1262 }
1263 }
1264 }
1265
1266 pub fn dedup(&mut self)
1270 where
1271 T: PartialEq,
1272 {
1273 self.dedup_by(|a, b| a == b);
1274 }
1275
1276 pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1278 where
1279 F: FnMut(&mut T, &mut T) -> bool,
1280 {
1281 if self.len <= 1 {
1282 return;
1283 }
1284 let mut write = 1;
1285 for read in 1..self.len {
1286 let should_keep = unsafe {
1287 let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1288 let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1289 !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1290 };
1291 if should_keep {
1292 if read != write {
1293 unsafe {
1294 let ptr_src = self.unchecked_at_mut(read) as *mut T;
1295 let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1296 std::ptr::swap(ptr_dst, ptr_src);
1297 }
1298 }
1299 write += 1;
1300 } else {
1301 unsafe {
1303 std::ptr::drop_in_place(self.unchecked_at_mut(read));
1304 }
1305 }
1306 }
1307 self.len = write;
1308 }
1309
1310 pub fn dedup_by_key<K, F>(&mut self, mut key: F)
1312 where
1313 F: FnMut(&mut T) -> K,
1314 K: PartialEq,
1315 {
1316 self.dedup_by(|a, b| key(a) == key(b));
1317 }
1318
1319 pub fn resize(&mut self, new_len: usize, value: T)
1325 where
1326 T: Clone,
1327 {
1328 if new_len > self.len {
1329 self.reserve(new_len - self.len);
1330 while self.len < new_len {
1331 self.push(value.clone());
1332 }
1333 } else {
1334 self.truncate(new_len);
1335 }
1336 }
1337
1338 pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
1343 where
1344 F: FnMut() -> T,
1345 {
1346 if new_len > self.len {
1347 self.reserve(new_len - self.len);
1348 while self.len < new_len {
1349 self.push(f());
1350 }
1351 } else {
1352 self.truncate(new_len);
1353 }
1354 }
1355
1356 pub fn append(&mut self, other: &mut Self) {
1358 let other_len = other.len;
1359 self.reserve(other_len);
1360 let start = self.len;
1361 while let Some(item) = other.pop() {
1362 self.push(item);
1363 }
1364 let mut left = start;
1366 let mut right = self.len;
1367 while left < right {
1368 right -= 1;
1369 if left < right {
1370 unsafe {
1371 let ptr_a = self.unchecked_at_mut(left) as *mut T;
1372 let ptr_b = self.unchecked_at_mut(right) as *mut T;
1373 std::ptr::swap(ptr_a, ptr_b);
1374 }
1375 left += 1;
1376 }
1377 }
1378 }
1379
    pub fn split_off(&mut self, at: usize) -> Self {
        assert!(at <= self.len);
        let mut other = Self::new();
        other.reserve(self.len - at);
        // Pop the tail off `self` so the cached write cursor stays consistent,
        // then restore the original order in `other`.
        while self.len > at {
            other.push(self.pop().unwrap());
        }
        other.reverse();
        other
    }
1398
1399 pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
1405 self.as_slice().chunks(chunk_size)
1406 }
1407
1408 pub fn windows(&self, size: usize) -> Windows<'_, T> {
1414 self.as_slice().windows(size)
1415 }
1416
1417 pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
1423 self.as_slice().rchunks(chunk_size)
1424 }
1425
1426 pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
1432 self.as_slice().chunks_exact(chunk_size)
1433 }
1434
1435 pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T> {
1441 assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            tail_start: range.end,
            index: range.start,
        }
1448 }
1449
1450 pub fn to_vec(&self) -> Vec<T>
1452 where
1453 T: Clone,
1454 {
1455 self.as_slice().to_vec()
1456 }
1457
1458 #[inline]
1462 fn shelf_count(box_count: usize) -> u32 {
1463 if box_count == 0 {
1464 return 0;
1465 }
1466 let val = box_count + Self::MIN_NON_ZERO_CAP;
1467 val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
1468 }
1469
1470 #[inline]
1472 fn shelf_size(shelf_index: u32) -> usize {
1473 Self::MIN_NON_ZERO_CAP << shelf_index
1474 }
1475
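    // Maps a flat element index to (shelf, index within the shelf). Biasing
    // the index by MIN_NON_ZERO_CAP turns every shelf boundary into a power
    // of two: the position of the highest set bit selects the shelf, and
    // clearing that bit yields the offset inside it. For example, with
    // MIN_NON_ZERO_CAP = 4 (so MIN_CAP_EXP = 2), index 5 biases to 9
    // (0b1001): shelf = 3 - 2 = 1, offset = 9 ^ 8 = 1.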
1476 #[inline]
1479 fn location(list_index: usize) -> (usize, usize) {
1480 let biased = list_index + Self::MIN_NON_ZERO_CAP;
1481 let msb = biased.ilog2();
1482 let shelf = msb - Self::MIN_CAP_EXP;
1483 let box_idx = biased ^ (1usize << msb);
1485 (shelf as usize, box_idx)
1486 }
1487
1488 #[inline]
1491 fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
1492 let biased = list_index + Self::MIN_NON_ZERO_CAP;
1493 let msb = biased.ilog2();
1494 let shelf = msb - Self::MIN_CAP_EXP;
1495 let box_idx = biased ^ (1usize << msb);
1496 let segment_remaining = (2usize << msb) - biased;
1500 (shelf as usize, box_idx, segment_remaining)
1501 }
1502
1503 #[inline]
1509 pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
1510 unsafe {
1511 let (shelf, box_idx) = Self::location(index);
1512 &*(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
1513 }
1514 }
1515
1516 #[inline]
1522 pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
1523 unsafe {
1524 let (shelf, box_idx) = Self::location(index);
1525 &mut *(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
1526 }
1527 }
1528
1529 #[inline]
1535 unsafe fn unchecked_read(&self, index: usize) -> T {
1536 unsafe {
1537 let (shelf, box_idx) = Self::location(index);
1538 std::ptr::read((*self.dynamic_segments.get_unchecked(shelf)).add(box_idx))
1539 }
1540 }
1541
1542 fn grow_once(&mut self) {
1543 assert!(
1544 self.segment_count < MAX_SEGMENTS,
1545 "Maximum segment count exceeded"
1546 );
1547
1548 let size = Self::shelf_size(self.segment_count as u32);
1549 let layout = Layout::array::<T>(size).expect("Layout overflow");
1550 let ptr = if layout.size() == 0 {
1551 std::ptr::dangling_mut::<T>()
1552 } else {
1553 let ptr = unsafe { alloc::alloc(layout) };
1554 if ptr.is_null() {
1555 panic!("Allocation failed");
1556 }
1557 ptr as *mut T
1558 };
1559 self.dynamic_segments[self.segment_count] = ptr;
1560 self.segment_count += 1;
1561 }
1562
1563 fn grow_capacity(&mut self, new_capacity: usize) {
1565 let new_shelf_count = Self::shelf_count(new_capacity) as usize;
1566 let old_shelf_count = self.segment_count;
1567
1568 if new_shelf_count > old_shelf_count {
1569 assert!(
1570 new_shelf_count <= MAX_SEGMENTS,
1571 "Maximum segment count exceeded"
1572 );
1573
1574 for i in old_shelf_count..new_shelf_count {
1575 let size = Self::shelf_size(i as u32);
1576 let layout = Layout::array::<T>(size).expect("Layout overflow");
1577 let ptr = if layout.size() == 0 {
1578 std::ptr::dangling_mut::<T>()
1579 } else {
1580 let ptr = unsafe { alloc::alloc(layout) };
1581 if ptr.is_null() {
1582 panic!("Allocation failed");
1583 }
1584 ptr as *mut T
1585 };
1586 self.dynamic_segments[i] = ptr;
1587 }
1588 self.segment_count = new_shelf_count;
1589 }
1590 }
1591
1592 #[inline]
1594 fn compute_capacity(shelf_count: u32) -> usize {
1595 (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
1596 }
1597
1598 fn shrink_capacity(&mut self, new_capacity: usize) {
1600 let new_shelf_count = Self::shelf_count(new_capacity);
1601 let old_shelf_count = self.segment_count as u32;
1602
1603 if new_shelf_count < old_shelf_count {
1604 self.free_shelves(old_shelf_count, new_shelf_count);
1605 self.segment_count = new_shelf_count as usize;
1606 }
1607 }
1608
1609 fn free_shelves(&mut self, from_count: u32, to_count: u32) {
1611 for i in (to_count..from_count).rev() {
1612 let size = Self::shelf_size(i);
1613 let layout = Layout::array::<T>(size).expect("Layout overflow");
1614 if layout.size() > 0 {
1615 unsafe {
1616 alloc::dealloc(self.dynamic_segments[i as usize] as *mut u8, layout);
1617 }
1618 }
1619 self.dynamic_segments[i as usize] = std::ptr::null_mut();
1620 }
1621 }
1622}
1623
1624impl<T> Drop for SegmentedVec<T> {
1625 fn drop(&mut self) {
1626 self.clear();
1628 self.free_shelves(self.segment_count as u32, 0);
1630 }
1631}
1632
1633impl<T> sort::IndexedAccess<T> for SegmentedVec<T> {
1634 #[inline]
1635 fn get_ref(&self, index: usize) -> &T {
1636 debug_assert!(index < self.len);
1637 unsafe { self.unchecked_at(index) }
1638 }
1639
1640 #[inline]
1641 fn get_ptr(&self, index: usize) -> *const T {
1642 debug_assert!(index < self.len);
1643 let (shelf, box_idx) = Self::location(index);
1644 unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
1645 }
1646
1647 #[inline]
1648 fn get_ptr_mut(&mut self, index: usize) -> *mut T {
1649 debug_assert!(index < self.len);
1650 let (shelf, box_idx) = Self::location(index);
1651 unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
1652 }
1653
1654 #[inline]
1655 fn swap(&mut self, a: usize, b: usize) {
1656 if a == b {
1657 return;
1658 }
1659 debug_assert!(a < self.len && b < self.len);
1660 unsafe {
1661 let ptr_a = self.get_ptr_mut(a);
1662 let ptr_b = self.get_ptr_mut(b);
1663 std::ptr::swap(ptr_a, ptr_b);
1664 }
1665 }
1666}
1667
1668impl<T> Default for SegmentedVec<T> {
1669 fn default() -> Self {
1670 Self::new()
1671 }
1672}
1673
1674impl<T> Index<usize> for SegmentedVec<T> {
1675 type Output = T;
1676
1677 fn index(&self, index: usize) -> &Self::Output {
1678 self.get(index).expect("index out of bounds")
1679 }
1680}
1681
1682impl<T> IndexMut<usize> for SegmentedVec<T> {
1683 fn index_mut(&mut self, index: usize) -> &mut Self::Output {
1684 self.get_mut(index).expect("index out of bounds")
1685 }
1686}
1687
1688impl<T: Clone> Clone for SegmentedVec<T> {
1689 fn clone(&self) -> Self {
1690 if self.len == 0 {
1691 return Self::new();
1692 }
1693
1694 let mut new_vec = Self::new();
1695 new_vec.reserve(self.len);
1696
1697 let mut remaining = self.len;
1698 for shelf in 0..self.segment_count {
1699 let size = Self::shelf_size(shelf as u32);
1700 let count = remaining.min(size);
1701 let src = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
1702 let dst = unsafe { *new_vec.dynamic_segments.get_unchecked(shelf) };
1703 for i in 0..count {
1704 unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1705 }
1706 new_vec.len += count;
1707 remaining -= count;
1708 if remaining == 0 {
1709 break;
1710 }
1711 }
1712
        if new_vec.len > 0 {
            // Recompute the cached write cursor from the last element so the
            // active shelf is always one that was actually allocated.
            let biased = new_vec.len - 1 + Self::MIN_NON_ZERO_CAP;
            let msb = biased.ilog2();
            let idx = (msb - Self::MIN_CAP_EXP) as usize;
            new_vec.active_segment_index = idx;

            let capacity = 1usize << msb;
            let offset = biased ^ capacity;
            unsafe {
                let base = *new_vec.dynamic_segments.get_unchecked(idx);
                new_vec.write_ptr = base.add(offset + 1);
                new_vec.segment_end = base.add(capacity);
            }
1727 }
1728
1729 new_vec
1730 }
1731}
1732
1733impl<T: std::fmt::Debug> std::fmt::Debug for SegmentedVec<T> {
1734 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1735 f.debug_list().entries(self.iter()).finish()
1736 }
1737}
1738
1739impl<T: PartialEq> PartialEq for SegmentedVec<T> {
1740 fn eq(&self, other: &Self) -> bool {
1741 if self.len != other.len {
1742 return false;
1743 }
1744 for i in 0..self.len {
1745 if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1746 return false;
1747 }
1748 }
1749 true
1750 }
1751}
1752
1753impl<T: Eq> Eq for SegmentedVec<T> {}
1754
1755impl<T> FromIterator<T> for SegmentedVec<T> {
1756 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1757 let iter = iter.into_iter();
1758 let (lower, _) = iter.size_hint();
1759 let mut vec = Self::new();
1760 vec.reserve(lower);
1761 for item in iter {
1762 vec.push(item);
1763 }
1764 vec
1765 }
1766}
1767
1768impl<T> Extend<T> for SegmentedVec<T> {
1769 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1770 let iter = iter.into_iter();
1771 let (lower, _) = iter.size_hint();
1772 self.reserve(lower);
1773 for item in iter {
1774 self.push(item);
1775 }
1776 }
1777}
1778
1779pub struct Iter<'a, T> {
1783 vec: &'a SegmentedVec<T>,
1784 ptr: *const T,
1786 segment_end: *const T,
1788 index: usize,
1790 shelf_index: u32,
1792}
1793
1794impl<'a, T> Iterator for Iter<'a, T> {
1795 type Item = &'a T;
1796
1797 #[inline]
1798 fn next(&mut self) -> Option<Self::Item> {
1799 if self.ptr < self.segment_end {
1800 let result = unsafe { &*self.ptr };
1801 self.ptr = unsafe { self.ptr.add(1) };
1802 self.index += 1;
1803 return Some(result);
1804 }
1805 self.next_segment()
1806 }
1807
1808 #[inline]
1809 fn size_hint(&self) -> (usize, Option<usize>) {
1810 let remaining = self.vec.len.saturating_sub(self.index);
1811 (remaining, Some(remaining))
1812 }
1813}
1814
1815impl<'a, T> Iter<'a, T> {
1816 #[inline]
1817 fn next_segment(&mut self) -> Option<&'a T> {
1818 if self.index >= self.vec.len {
1819 return None;
1820 }
1821
1822 let shelf_idx = self.shelf_index as usize;
1823 let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
1824 let ptr = self.vec.dynamic_segments[shelf_idx];
1825 let segment_len = shelf_size.min(self.vec.len - self.index);
1826 self.ptr = ptr;
1827 self.segment_end = unsafe { ptr.add(segment_len) };
1828 self.shelf_index += 1;
1829
1830 let result = unsafe { &*self.ptr };
1831 self.ptr = unsafe { self.ptr.add(1) };
1832 self.index += 1;
1833 Some(result)
1834 }
1835}
1836
1837impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
1838
1839pub struct IterMut<'a, T> {
1841 vec: &'a mut SegmentedVec<T>,
1842 ptr: *mut T,
1844 segment_end: *mut T,
1846 index: usize,
1848 shelf_index: u32,
1850}
1851
1852impl<'a, T> Iterator for IterMut<'a, T> {
1853 type Item = &'a mut T;
1854
1855 #[inline]
1856 fn next(&mut self) -> Option<Self::Item> {
1857 if self.ptr < self.segment_end {
1858 let result = self.ptr;
1859 self.ptr = unsafe { self.ptr.add(1) };
1860 self.index += 1;
1861 return Some(unsafe { &mut *result });
1862 }
1863 self.next_segment()
1864 }
1865
1866 #[inline]
1867 fn size_hint(&self) -> (usize, Option<usize>) {
1868 let remaining = self.vec.len.saturating_sub(self.index);
1869 (remaining, Some(remaining))
1870 }
1871}
1872
1873impl<'a, T> IterMut<'a, T> {
1874 #[inline]
1875 fn next_segment(&mut self) -> Option<&'a mut T> {
1876 if self.index >= self.vec.len {
1877 return None;
1878 }
1879
1880 let shelf_idx = self.shelf_index as usize;
1881 let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
1882 let ptr = self.vec.dynamic_segments[shelf_idx];
1883 let segment_len = shelf_size.min(self.vec.len - self.index);
1884 self.ptr = ptr;
1885 self.segment_end = unsafe { ptr.add(segment_len) };
1886 self.shelf_index += 1;
1887
1888 let result = self.ptr;
1889 self.ptr = unsafe { self.ptr.add(1) };
1890 self.index += 1;
1891 Some(unsafe { &mut *result })
1892 }
1893}
1894
1895impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
1896
1897impl<T> IntoIterator for SegmentedVec<T> {
1898 type Item = T;
1899 type IntoIter = IntoIter<T>;
1900
1901 fn into_iter(self) -> Self::IntoIter {
1902 IntoIter {
1903 vec: self,
1904 index: 0,
1905 }
1906 }
1907}
1908
1909impl<'a, T> IntoIterator for &'a SegmentedVec<T> {
1910 type Item = &'a T;
1911 type IntoIter = Iter<'a, T>;
1912
1913 fn into_iter(self) -> Self::IntoIter {
1914 self.iter()
1915 }
1916}
1917
1918impl<'a, T> IntoIterator for &'a mut SegmentedVec<T> {
1919 type Item = &'a mut T;
1920 type IntoIter = IterMut<'a, T>;
1921
1922 fn into_iter(self) -> Self::IntoIter {
1923 self.iter_mut()
1924 }
1925}
1926
1927pub struct IntoIter<T> {
1929 vec: SegmentedVec<T>,
1930 index: usize,
1931}
1932
1933impl<T> Iterator for IntoIter<T> {
1934 type Item = T;
1935
1936 fn next(&mut self) -> Option<Self::Item> {
1937 if self.index >= self.vec.len {
1938 return None;
1939 }
1940 let value = unsafe { self.vec.unchecked_read(self.index) };
1941 self.index += 1;
1942 Some(value)
1943 }
1944
1945 fn size_hint(&self) -> (usize, Option<usize>) {
1946 let remaining = self.vec.len - self.index;
1947 (remaining, Some(remaining))
1948 }
1949}
1950
1951impl<T> ExactSizeIterator for IntoIter<T> {}
1952
1953impl<T> Drop for IntoIter<T> {
1954 fn drop(&mut self) {
1955 for i in self.index..self.vec.len {
1957 unsafe {
1958 std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1959 }
1960 }
1961 self.vec.len = 0;
1963 }
1964}
1965
pub struct Drain<'a, T> {
    vec: &'a mut SegmentedVec<T>,
    // Start of the drained range (also where the surviving tail is shifted to).
    range_start: usize,
    // Back cursor: elements in `index..range_end` have not been yielded yet.
    range_end: usize,
    // Original end of the drained range; the tail at `tail_start..len` is kept.
    tail_start: usize,
    // Front cursor.
    index: usize,
}
1975
1976impl<'a, T> Iterator for Drain<'a, T> {
1977 type Item = T;
1978
1979 fn next(&mut self) -> Option<Self::Item> {
1980 if self.index >= self.range_end {
1981 None
1982 } else {
1983 let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1984 self.index += 1;
1985 Some(value)
1986 }
1987 }
1988
1989 fn size_hint(&self) -> (usize, Option<usize>) {
1990 let remaining = self.range_end - self.index;
1991 (remaining, Some(remaining))
1992 }
1993}
1994
1995impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
1996 fn next_back(&mut self) -> Option<Self::Item> {
1997 if self.index >= self.range_end {
1998 None
1999 } else {
2000 self.range_end -= 1;
2001 Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
2002 }
2003 }
2004}
2005
2006impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
2007
impl<'a, T> Drop for Drain<'a, T> {
    fn drop(&mut self) {
        // Drop the drained elements that were never yielded.
        for i in self.index..self.range_end {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }

        let original_len = self.vec.len;
        let drain_count = self.tail_start - self.range_start;
        if drain_count == 0 {
            return;
        }

        // Shift the surviving tail down to close the gap.
        for i in 0..(original_len - self.tail_start) {
            unsafe {
                let src = self.vec.unchecked_at(self.tail_start + i) as *const T;
                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }

        let new_len = original_len - drain_count;
        self.vec.len = new_len;
        // Recompute the cached write cursor for the shortened vector.
        if new_len > 0 {
            let (shelf, box_idx) = SegmentedVec::<T>::location(new_len - 1);
            self.vec.active_segment_index = shelf;
            unsafe {
                let base = *self.vec.dynamic_segments.get_unchecked(shelf);
                self.vec.write_ptr = base.add(box_idx + 1);
                self.vec.segment_end = base.add(SegmentedVec::<T>::shelf_size(shelf as u32));
            }
        } else {
            self.vec.write_ptr = std::ptr::null_mut();
            self.vec.segment_end = std::ptr::null_mut();
            self.vec.active_segment_index = usize::MAX;
        }
    }
}
2033
2034#[cfg(test)]
2035mod tests {
2036 use super::*;
2037
2038 #[test]
2039 fn test_new_empty() {
2040 let vec: SegmentedVec<i32> = SegmentedVec::new();
2041 assert!(vec.is_empty());
2042 assert_eq!(vec.len(), 0);
2043 }
2044
2045 #[test]
2046 fn test_push_pop() {
2047 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2048 vec.push(1);
2049 vec.push(2);
2050 vec.push(3);
2051 assert_eq!(vec.len(), 3);
2052 assert_eq!(vec.pop(), Some(3));
2053 assert_eq!(vec.pop(), Some(2));
2054 assert_eq!(vec.pop(), Some(1));
2055 assert_eq!(vec.pop(), None);
2056 }
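
    // `insert` shifts elements right and `remove` shifts them left one slot
    // at a time, so both are O(n) but order-preserving (mirroring `Vec`).
    #[test]
    fn test_insert_remove() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([1, 2, 4, 5]);
        vec.insert(2, 3);
        vec.insert(0, 0);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5]);

        assert_eq!(vec.remove(0), 0);
        assert_eq!(vec.remove(2), 3);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![1, 2, 4, 5]);
    }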
2057
2058 #[test]
2059 fn test_get() {
2060 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2061 vec.push(10);
2062 vec.push(20);
2063 vec.push(30);
2064 assert_eq!(vec.get(0), Some(&10));
2065 assert_eq!(vec.get(1), Some(&20));
2066 assert_eq!(vec.get(2), Some(&30));
2067 assert_eq!(vec.get(3), None);
2068 }
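
    // `swap_remove` trades order for O(1): the last element is swapped into
    // the vacated slot before the pop.
    #[test]
    fn test_swap_remove() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..5);
        assert_eq!(vec.swap_remove(1), 1);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 4, 2, 3]);

        // Removing the current last element needs no swap at all.
        assert_eq!(vec.swap_remove(3), 3);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 4, 2]);
    }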
2069
2070 #[test]
2071 fn test_index() {
2072 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2073 vec.push(10);
2074 vec.push(20);
2075 assert_eq!(vec[0], 10);
2076 assert_eq!(vec[1], 20);
2077 vec[0] = 100;
2078 assert_eq!(vec[0], 100);
2079 }
2080
2081 #[test]
2082 fn test_stable_pointers() {
2083 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2084 vec.push(1);
2085 let ptr = &vec[0] as *const i32;
2086
2087 for i in 2..1000 {
2089 vec.push(i);
2090 }
2091
2092 assert_eq!(unsafe { *ptr }, 1);
2094 }
2095
2096 #[test]
2097 fn test_iter() {
2098 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2099 for i in 0..100 {
2100 vec.push(i);
2101 }
2102
2103 let collected: Vec<i32> = vec.iter().copied().collect();
2104 let expected: Vec<i32> = (0..100).collect();
2105 assert_eq!(collected, expected);
2106 }
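
    // `reverse` exchanges prefix and suffix runs in place across shelf
    // boundaries; 100 elements span several shelves for i32.
    #[test]
    fn test_reverse() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..100);
        vec.reverse();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..100).rev().collect::<Vec<_>>());
    }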
2107
2108 #[test]
2109 fn test_iter_mut() {
2110 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2111 for i in 0..10 {
2112 vec.push(i);
2113 }
2114
2115 for item in vec.iter_mut() {
2116 *item *= 2;
2117 }
2118
2119 let collected: Vec<i32> = vec.iter().copied().collect();
2120 let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
2121 assert_eq!(collected, expected);
2122 }
2123
2124 #[test]
2125 fn test_into_iter() {
2126 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2127 for i in 0..10 {
2128 vec.push(i);
2129 }
2130
2131 let collected: Vec<i32> = vec.into_iter().collect();
2132 let expected: Vec<i32> = (0..10).collect();
2133 assert_eq!(collected, expected);
2134 }
2135
2136 #[test]
2137 fn test_from_iter() {
2138 let vec: SegmentedVec<i32> = (0..10).collect();
2139 assert_eq!(vec.len(), 10);
2140 for i in 0..10 {
2141 assert_eq!(vec[i], i as i32);
2142 }
2143 }
2144
2145 #[test]
2146 fn test_extend() {
2147 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2148 vec.extend(0..5);
2149 vec.extend(5..10);
2150 assert_eq!(vec.len(), 10);
2151 for i in 0..10 {
2152 assert_eq!(vec[i], i as i32);
2153 }
2154 }
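
    // `append` moves every element out of `other`, leaving it empty.
    #[test]
    fn test_append() {
        let mut a: SegmentedVec<i32> = SegmentedVec::new();
        a.extend(0..5);
        let mut b: SegmentedVec<i32> = SegmentedVec::new();
        b.extend(5..10);

        a.append(&mut b);
        assert!(b.is_empty());
        let result: Vec<i32> = a.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }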
2155
2156 #[test]
2157 fn test_clear() {
2158 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2159 vec.extend(0..10);
2160 vec.clear();
2161 assert!(vec.is_empty());
2162 assert_eq!(vec.len(), 0);
2163 }
2164
2165 #[test]
2166 fn test_truncate() {
2167 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2168 vec.extend(0..10);
2169 vec.truncate(5);
2170 assert_eq!(vec.len(), 5);
2171 for i in 0..5 {
2172 assert_eq!(vec[i], i as i32);
2173 }
2174 }
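
    // `drain` yields the removed range and closes the gap when the iterator
    // is dropped; the vector must stay usable (pushable) afterwards.
    #[test]
    fn test_drain() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        let drained: Vec<i32> = vec.drain(2..5).collect();
        assert_eq!(drained, vec![2, 3, 4]);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 5, 6, 7, 8, 9]);
        vec.push(10);
        assert_eq!(vec[7], 10);

        // Back-to-front consumption via DoubleEndedIterator.
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        let back: Vec<i32> = vec.drain(3..7).rev().collect();
        assert_eq!(back, vec![6, 5, 4, 3]);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 7, 8, 9]);
    }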
2175
2176 #[test]
2177 fn test_clone() {
2178 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2179 vec.extend(0..10);
2180 let vec2 = vec.clone();
2181 assert_eq!(vec, vec2);
2182 }
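
    // `split_off` keeps the head in place and returns the tail; both halves
    // must remain independently usable.
    #[test]
    fn test_split_off() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        let tail = vec.split_off(6);
        let head: Vec<i32> = vec.iter().copied().collect();
        let tail: Vec<i32> = tail.iter().copied().collect();
        assert_eq!(head, vec![0, 1, 2, 3, 4, 5]);
        assert_eq!(tail, vec![6, 7, 8, 9]);

        vec.push(100);
        assert_eq!(vec[6], 100);
    }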
2183
2184 #[test]
2185 fn test_debug() {
2186 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2187 vec.extend(0..3);
2188 let debug_str = format!("{:?}", vec);
2189 assert_eq!(debug_str, "[0, 1, 2]");
2190 }
2191
2192 #[test]
2193 fn test_first_last() {
2194 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2195 assert_eq!(vec.first(), None);
2196 assert_eq!(vec.last(), None);
2197
2198 vec.push(1);
2199 assert_eq!(vec.first(), Some(&1));
2200 assert_eq!(vec.last(), Some(&1));
2201
2202 vec.push(2);
2203 vec.push(3);
2204 assert_eq!(vec.first(), Some(&1));
2205 assert_eq!(vec.last(), Some(&3));
2206 }
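
    // `retain` keeps only matching elements and `dedup` drops consecutive
    // duplicates; both preserve the relative order of what remains.
    #[test]
    fn test_retain_and_dedup() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);
        vec.retain(|x| x % 2 == 0);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 2, 4, 6, 8]);

        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend([1, 1, 2, 2, 2, 3, 1]);
        vec.dedup();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![1, 2, 3, 1]);
    }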
2207
2208 #[test]
2209 fn test_drop_elements() {
2210 use std::cell::Cell;
2211 use std::rc::Rc;
2212
2213 let drop_count = Rc::new(Cell::new(0));
2214
2215 struct DropCounter {
2216 counter: Rc<Cell<i32>>,
2217 }
2218
2219 impl Drop for DropCounter {
2220 fn drop(&mut self) {
2221 self.counter.set(self.counter.get() + 1);
2222 }
2223 }
2224
2225 {
2226 let mut vec: SegmentedVec<DropCounter> = SegmentedVec::new();
2227 for _ in 0..10 {
2228 vec.push(DropCounter {
2229 counter: Rc::clone(&drop_count),
2230 });
2231 }
2232 }
2233
2234 assert_eq!(drop_count.get(), 10);
2235 }
2236
2237 #[test]
2238 fn test_shrink_to_fit() {
2239 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2240 vec.extend(0..100);
2241 vec.truncate(5);
2242 vec.shrink_to_fit();
2243 assert_eq!(vec.len(), 5);
2244 for i in 0..5 {
2245 assert_eq!(vec[i], i as i32);
2246 }
2247 }
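
    // `resize` grows with clones of the given value or shrinks via `truncate`;
    // `resize_with` does the same with a generator closure.
    #[test]
    fn test_resize() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.resize(5, 7);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![7, 7, 7, 7, 7]);

        vec.resize(2, 0);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![7, 7]);

        vec.resize_with(4, || 9);
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![7, 7, 9, 9]);
    }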
2248
2249 #[test]
2250 fn test_sort() {
2251 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2252 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2253 vec.sort();
2254 let result: Vec<i32> = vec.iter().copied().collect();
2255 assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2256 }
2257
2258 #[test]
2259 fn test_sort_empty() {
2260 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2261 vec.sort();
2262 assert!(vec.is_empty());
2263 }
2264
2265 #[test]
2266 fn test_sort_single() {
2267 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2268 vec.push(42);
2269 vec.sort();
2270 assert_eq!(vec[0], 42);
2271 }
2272
2273 #[test]
2274 fn test_sort_already_sorted() {
2275 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2276 vec.extend(0..10);
2277 vec.sort();
2278 let result: Vec<i32> = vec.iter().copied().collect();
2279 assert_eq!(result, (0..10).collect::<Vec<_>>());
2280 }
2281
2282 #[test]
2283 fn test_sort_reverse_sorted() {
2284 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2285 vec.extend((0..10).rev());
2286 vec.sort();
2287 let result: Vec<i32> = vec.iter().copied().collect();
2288 assert_eq!(result, (0..10).collect::<Vec<_>>());
2289 }
2290
2291 #[test]
2292 fn test_sort_by() {
2293 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2294 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
2297 assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
2298 }
2299
2300 #[test]
2301 fn test_sort_by_key() {
2302 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2303 vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
2304 vec.sort_by_key(|k| k.abs());
2305 let result: Vec<i32> = vec.iter().copied().collect();
2306 assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
2307 }
2308
2309 #[test]
2310 fn test_sort_stable() {
2311 #[derive(Debug, Clone, Eq, PartialEq)]
2313 struct Item {
2314 key: i32,
2315 order: usize,
2316 }
2317
2318 let mut vec: SegmentedVec<Item> = SegmentedVec::new();
2319 vec.push(Item { key: 1, order: 0 });
2320 vec.push(Item { key: 2, order: 1 });
2321 vec.push(Item { key: 1, order: 2 });
2322 vec.push(Item { key: 2, order: 3 });
2323 vec.push(Item { key: 1, order: 4 });
2324
2325 vec.sort_by_key(|item| item.key);
2326
2327 assert_eq!(vec[0], Item { key: 1, order: 0 });
2329 assert_eq!(vec[1], Item { key: 1, order: 2 });
2330 assert_eq!(vec[2], Item { key: 1, order: 4 });
2331 assert_eq!(vec[3], Item { key: 2, order: 1 });
2333 assert_eq!(vec[4], Item { key: 2, order: 3 });
2334 }
2335
2336 #[test]
2337 fn test_sort_unstable() {
2338 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2339 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2340 vec.sort_unstable();
2341 let result: Vec<i32> = vec.iter().copied().collect();
2342 assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2343 }
2344
2345 #[test]
2346 fn test_sort_unstable_by() {
2347 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2348 vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable_by(|a, b| b.cmp(a));
        let result: Vec<i32> = vec.iter().copied().collect();
2351 assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
2352 }
2353
2354 #[test]
2355 fn test_sort_unstable_by_key() {
2356 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2357 vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
2358 vec.sort_unstable_by_key(|k| k.abs());
2359 let result: Vec<i32> = vec.iter().copied().collect();
2361 for i in 1..result.len() {
2362 assert!(result[i - 1].abs() <= result[i].abs());
2363 }
2364 }
2365
2366 #[test]
2367 fn test_sort_large() {
2368 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2369 vec.extend((0..1000).rev());
2371 vec.sort();
2372 for i in 0..1000 {
2373 assert_eq!(vec[i], i as i32);
2374 }
2375 }
2376
2377 #[test]
2378 fn test_sort_unstable_large() {
2379 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2380 vec.extend((0..1000).rev());
2382 vec.sort_unstable();
2383 for i in 0..1000 {
2384 assert_eq!(vec[i], i as i32);
2385 }
2386 }
2387
2388 #[test]
2389 fn test_sort_pointers_remain_stable_after_sort() {
2390 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2393 vec.extend([5, 2, 8, 1, 9]);
2394
2395 let ptr = &vec[0] as *const i32;
2397
2398 vec.sort();
2399
        assert_eq!(unsafe { *ptr }, 1);
    }
2403
2404 #[test]
2407 fn test_as_slice() {
2408 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2409 vec.extend(0..10);
2410
2411 let slice = vec.as_slice();
2412 assert_eq!(slice.len(), 10);
2413 assert_eq!(slice[0], 0);
2414 assert_eq!(slice[9], 9);
2415 }
2416
2417 #[test]
2418 fn test_as_mut_slice() {
2419 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2420 vec.extend(0..10);
2421
2422 {
2423 let mut slice = vec.as_mut_slice();
2424 slice[0] = 100;
2425 slice[9] = 200;
2426 }
2427
2428 assert_eq!(vec[0], 100);
2429 assert_eq!(vec[9], 200);
2430 }
2431
2432 #[test]
2433 fn test_slice_range() {
2434 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2435 vec.extend(0..10);
2436
2437 let slice = vec.slice(2..5);
2438 assert_eq!(slice.len(), 3);
2439 assert_eq!(slice[0], 2);
2440 assert_eq!(slice[1], 3);
2441 assert_eq!(slice[2], 4);
2442 }
2443
2444 #[test]
2445 fn test_slice_mut_range() {
2446 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2447 vec.extend(0..10);
2448
2449 {
2450 let mut slice = vec.slice_mut(2..5);
2451 slice[0] = 100;
2452 slice[2] = 200;
2453 }
2454
2455 assert_eq!(vec[2], 100);
2456 assert_eq!(vec[4], 200);
2457 }
2458
2459 #[test]
2460 fn test_slice_first_last() {
2461 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2462 vec.extend(0..10);
2463
2464 let slice = vec.as_slice();
2465 assert_eq!(slice.first(), Some(&0));
2466 assert_eq!(slice.last(), Some(&9));
2467 }
2468
2469 #[test]
2470 fn test_slice_iter() {
2471 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2472 vec.extend(0..10);
2473
2474 let slice = vec.as_slice();
2475 let collected: Vec<i32> = slice.iter().copied().collect();
2476 assert_eq!(collected, (0..10).collect::<Vec<_>>());
2477 }
2478
2479 #[test]
2480 fn test_slice_iter_rev() {
2481 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2482 vec.extend(0..10);
2483
2484 let slice = vec.as_slice();
2485 let collected: Vec<i32> = slice.iter().rev().copied().collect();
2486 assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
2487 }
2488
2489 #[test]
2490 fn test_slice_contains() {
2491 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2492 vec.extend(0..10);
2493
2494 let slice = vec.as_slice();
2495 assert!(slice.contains(&5));
2496 assert!(!slice.contains(&100));
2497 }
2498
2499 #[test]
2500 fn test_slice_binary_search() {
2501 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2502 vec.extend(0..100);
2503
2504 let slice = vec.as_slice();
2505 assert_eq!(slice.binary_search(&50), Ok(50));
2506 assert_eq!(slice.binary_search(&0), Ok(0));
2507 assert_eq!(slice.binary_search(&99), Ok(99));
2508 assert_eq!(slice.binary_search(&100), Err(100));
2509 }
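
    // Binary search on the vector itself should give the same answers as on a
    // flat sorted slice, including insertion points across shelf boundaries.
    #[test]
    fn test_binary_search_vec() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend((0..200).map(|x| x * 2));
        assert_eq!(vec.binary_search(&0), Ok(0));
        assert_eq!(vec.binary_search(&100), Ok(50));
        assert_eq!(vec.binary_search(&398), Ok(199));
        assert_eq!(vec.binary_search(&7), Err(4));
        assert_eq!(vec.binary_search(&400), Err(200));
    }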
2510
2511 #[test]
2512 fn test_slice_split_at() {
2513 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2514 vec.extend(0..10);
2515
2516 let slice = vec.as_slice();
2517 let (left, right) = slice.split_at(5);
2518 assert_eq!(left.len(), 5);
2519 assert_eq!(right.len(), 5);
2520 assert_eq!(left[0], 0);
2521 assert_eq!(right[0], 5);
2522 }
2523
2524 #[test]
2525 fn test_slice_to_vec() {
2526 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2527 vec.extend(0..10);
2528
2529 let slice = vec.as_slice();
2530 let converted = slice.to_vec();
2531 assert_eq!(converted, (0..10).collect::<Vec<_>>());
2532 }
2533
2534 #[test]
2535 fn test_slice_mut_sort() {
2536 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2537 vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);
2538
2539 {
2541 let mut slice = vec.slice_mut(2..8);
2542 slice.sort();
2543 }
2544
2545 let result: Vec<i32> = vec.iter().copied().collect();
2546 assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
2547 }
2548
2549 #[test]
2550 fn test_slice_mut_reverse() {
2551 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2552 vec.extend(0..10);
2553
2554 {
2555 let mut slice = vec.slice_mut(2..8);
2556 slice.reverse();
2557 }
2558
2559 let result: Vec<i32> = vec.iter().copied().collect();
2560 assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
2561 }
2562
2563 #[test]
2564 fn test_slice_mut_fill() {
2565 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2566 vec.extend(0..10);
2567
2568 {
2569 let mut slice = vec.slice_mut(2..5);
2570 slice.fill(99);
2571 }
2572
2573 let result: Vec<i32> = vec.iter().copied().collect();
2574 assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
2575 }
2576
2577 #[test]
2578 fn test_slice_starts_with() {
2579 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2580 vec.extend(0..10);
2581
2582 let slice = vec.as_slice();
2583 assert!(slice.starts_with(&[0, 1, 2]));
2584 assert!(!slice.starts_with(&[1, 2, 3]));
2585 }
2586
2587 #[test]
2588 fn test_slice_ends_with() {
2589 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2590 vec.extend(0..10);
2591
2592 let slice = vec.as_slice();
2593 assert!(slice.ends_with(&[7, 8, 9]));
2594 assert!(!slice.ends_with(&[6, 7, 8]));
2595 }
2596
2597 #[test]
2598 fn test_starts_with_edge_cases() {
2599 let empty: SegmentedVec<i32> = SegmentedVec::new();
        assert!(empty.starts_with(&[]));
        assert!(!empty.starts_with(&[1]));

        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2606 vec.extend(0..10);
        assert!(vec.starts_with(&[]));

        let full: Vec<i32> = (0..10).collect();
2611 assert!(vec.starts_with(&full));
2612
2613 let longer: Vec<i32> = (0..20).collect();
2615 assert!(!vec.starts_with(&longer));
2616
2617 assert!(vec.starts_with(&[0]));
2619 assert!(!vec.starts_with(&[1]));
2620 }
2621
2622 #[test]
2623 fn test_ends_with_edge_cases() {
2624 let empty: SegmentedVec<i32> = SegmentedVec::new();
        assert!(empty.ends_with(&[]));
        assert!(!empty.ends_with(&[1]));

        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2631 vec.extend(0..10);
        assert!(vec.ends_with(&[]));

        let full: Vec<i32> = (0..10).collect();
2636 assert!(vec.ends_with(&full));
2637
2638 let longer: Vec<i32> = (0..20).collect();
2640 assert!(!vec.ends_with(&longer));
2641
2642 assert!(vec.ends_with(&[9]));
2644 assert!(!vec.ends_with(&[8]));
2645 }
2646
2647 #[test]
2648 fn test_starts_with_across_segments() {
2649 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2651 vec.extend(0..1000);
2652
2653 assert!(vec.starts_with(&[0, 1, 2, 3, 4]));
2655
2656 let prefix: Vec<i32> = (0..100).collect();
2658 assert!(vec.starts_with(&prefix));
2659
2660 assert!(!vec.starts_with(&[1, 2, 3]));
2662 }
2663
2664 #[test]
2665 fn test_ends_with_across_segments() {
2666 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2668 vec.extend(0..1000);
2669
2670 assert!(vec.ends_with(&[997, 998, 999]));
2672
2673 let suffix: Vec<i32> = (900..1000).collect();
2675 assert!(vec.ends_with(&suffix));
2676
2677 assert!(!vec.ends_with(&[996, 997, 998]));
2679 }
2680
2681 #[test]
2682 fn test_slice_eq() {
2683 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2684 vec.extend(0..10);
2685
2686 let slice = vec.as_slice();
2687 assert_eq!(slice, (0..10).collect::<Vec<_>>());
2688 }
2689
2690 #[test]
2691 fn test_min_non_zero_cap() {
2692 let mut vec_u8: SegmentedVec<u8> = SegmentedVec::new();
2694 vec_u8.push(1);
2695 assert_eq!(vec_u8.capacity(), 8);
2696
2697 let mut vec_i32: SegmentedVec<i32> = SegmentedVec::new();
2699 vec_i32.push(1);
2700 assert_eq!(vec_i32.capacity(), 4);
2701
2702 for i in 0u8..100 {
2704 vec_u8.push(i);
2705 }
2706 for i in 0..101 {
2707 assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
2708 }
2709 }
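
    // `with_capacity` pre-allocates shelves; a request of zero must leave the
    // vector in its pristine empty state.
    #[test]
    fn test_with_capacity() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::with_capacity(0);
        assert!(vec.is_empty());
        vec.push(1);
        assert_eq!(vec[0], 1);

        let mut vec: SegmentedVec<i32> = SegmentedVec::with_capacity(100);
        assert!(vec.capacity() >= 100);
        vec.extend(0..100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }
    }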
2710
2711 #[test]
2712 fn test_extend_from_copy_slice() {
2713 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2714 let data: Vec<i32> = (0..100).collect();
2715 vec.extend_from_copy_slice(&data);
2716 assert_eq!(vec.len(), 100);
2717 for i in 0..100 {
2718 assert_eq!(vec[i], i as i32);
2719 }
2720
2721 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2722 vec.push(999);
2723 vec.push(998);
2724 vec.extend_from_copy_slice(&data[..10]);
2725 assert_eq!(vec.len(), 12);
2726 assert_eq!(vec[0], 999);
2727 assert_eq!(vec[1], 998);
2728 for i in 0..10 {
2729 assert_eq!(vec[i + 2], i as i32);
2730 }
2731
2732 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2733 vec.extend_from_copy_slice(&[]);
2734 assert!(vec.is_empty());
2735
2736 let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]);
        assert_eq!(vec.len(), 3);
        vec.push(4);
        vec.push(5);
2741 vec.push(6);
2742 assert_eq!(vec.len(), 6);
2743 for i in 0..6 {
2744 assert_eq!(vec[i], (i + 1) as i32);
2745 }
2746 }
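
    // Extending an empty vector by exactly one full shelf (MIN_NON_ZERO_CAP
    // elements for i32) is the boundary case for the cached write cursor; a
    // subsequent push must land at index 4 in a freshly grown shelf.
    #[test]
    fn test_extend_exact_shelf_then_push() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3, 4]);
        assert_eq!(vec.len(), 4);
        vec.push(5);
        for i in 0..5 {
            assert_eq!(vec[i], (i + 1) as i32);
        }
    }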
2747}