segmented_vec/
lib.rs

1//! A segmented growable vector with stable pointers.
2//!
3//! Unlike `Vec`, pushing new elements never invalidates pointers to existing elements.
4//! This is achieved by storing elements in segments of exponentially growing sizes.
5//!
6//! # Example
7//!
8//! ```
9//! use segmented_vec::SegmentedVec;
10//!
11//! let mut vec: SegmentedVec<i32> = SegmentedVec::new();
12//! vec.push(1);
13//! vec.push(2);
14//!
15//! // Get a pointer to the first element
16//! let ptr = &vec[0] as *const i32;
17//!
18//! // Push more elements - the pointer remains valid!
19//! for i in 3..100 {
20//!     vec.push(i);
21//! }
22//!
23//! // The pointer is still valid
24//! assert_eq!(unsafe { *ptr }, 1);
25//! ```
26
27mod slice;
28mod sort;
29
30pub use slice::{
31    Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32    Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::ops::{Index, IndexMut};
39
40/// Maximum number of dynamic segments supported.
41/// With exponentially growing segments, 64 segments can hold more than 2^64 elements.
42const MAX_SEGMENTS: usize = 64;
43
44/// A segmented vector with stable pointers.
45#[repr(C)]
46pub struct SegmentedVec<T> {
47    /// Cached pointer to the next write position (for fast push)
48    write_ptr: *mut T,
49    /// Pointer to the end of the current segment
50    segment_end: *mut T,
51    len: usize,
52    /// Index of the current segment
53    active_segment_index: usize,
54    segment_count: usize,
55    dynamic_segments: [*mut T; MAX_SEGMENTS],
56    _marker: PhantomData<T>,
57}
58
59impl<T> SegmentedVec<T> {
60    /// Minimum capacity for the first dynamic segment
61    /// Avoids tiny allocations that heap allocators round up anyway.
62    /// - 8 for 1-byte elements (allocators round up small requests)
63    /// - 4 for moderate elements (<= 1 KiB)
64    /// - 1 for large elements (avoid wasting space)
65    const MIN_NON_ZERO_CAP: usize = {
66        let size = std::mem::size_of::<T>();
67        if size == 1 {
68            8
69        } else if size <= 1024 {
70            4
71        } else {
72            1
73        }
74    };
75
76    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();
77
78    /// Creates a new empty `SegmentedVec`.
79    ///
80    /// # Example
81    ///
82    /// ```
83    /// use segmented_vec::SegmentedVec;
84    /// let vec: SegmentedVec<i32> = SegmentedVec::new();
85    /// assert!(vec.is_empty());
86    /// ```
87    #[inline]
88    pub const fn new() -> Self {
89        Self {
90            dynamic_segments: [std::ptr::null_mut(); MAX_SEGMENTS],
91            segment_count: 0,
92            len: 0,
93            write_ptr: std::ptr::null_mut(),
94            segment_end: std::ptr::null_mut(),
95            // Optimization: Initialize to MAX so the first increment wrap around and make it 0
96            active_segment_index: usize::MAX,
97            _marker: PhantomData,
98        }
99    }
100
101    /// Returns the number of elements in the vector.
102    #[inline]
103    pub const fn len(&self) -> usize {
104        self.len
105    }
106
107    /// Returns `true` if the vector contains no elements.
108    #[inline]
109    pub const fn is_empty(&self) -> bool {
110        self.len == 0
111    }
112
113    /// Returns the current capacity of the vector.
114    #[inline]
115    pub fn capacity(&self) -> usize {
116        Self::compute_capacity(self.segment_count as u32)
117    }
118
119    /// Appends an element to the back of the vector.
120    ///
121    /// # Panics
122    ///
123    /// Panics if allocation fails.
124    ///
125    /// # Example
126    ///
127    /// ```
128    /// use segmented_vec::SegmentedVec;
129    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
130    /// vec.push(1);
131    /// vec.push(2);
132    /// assert_eq!(vec.len(), 2);
133    /// ```
134    #[inline]
135    pub fn push(&mut self, value: T) {
136        // Fast path: we have space in the current segment
137        if self.write_ptr < self.segment_end {
138            unsafe {
139                std::ptr::write(self.write_ptr, value);
140                self.write_ptr = self.write_ptr.add(1);
141            }
142            self.len += 1;
143            return;
144        }
145
146        // Slow path: need to grow or move to next segment
147        self.push_slow(value);
148    }
149
150    #[cold]
151    #[inline(never)]
152    fn push_slow(&mut self, value: T) {
153        self.active_segment_index = self.active_segment_index.wrapping_add(1);
154
155        if self.active_segment_index >= self.segment_count {
156            self.grow_once();
157        }
158
159        let idx = self.active_segment_index;
160
161        // Load the base pointer for the new segment
162        // SAFETY: grow_once guarantees this slot is populated.
163        let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
164        let shelf_size = Self::MIN_NON_ZERO_CAP << idx;
165
166        unsafe {
167            std::ptr::write(base, value);
168            self.write_ptr = base.add(1);
169            self.segment_end = base.add(shelf_size);
170        }
171        self.len += 1;
172    }
173
174    /// Removes the last element from the vector and returns it, or `None` if empty.
175    ///
176    /// # Example
177    ///
178    /// ```
179    /// use segmented_vec::SegmentedVec;
180    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
181    /// vec.push(1);
182    /// vec.push(2);
183    /// assert_eq!(vec.pop(), Some(2));
184    /// assert_eq!(vec.pop(), Some(1));
185    /// assert_eq!(vec.pop(), None);
186    /// ```
187    #[inline]
188    pub fn pop(&mut self) -> Option<T> {
189        if self.len == 0 {
190            return None;
191        }
192
193        // Fast path: current segment still has elements
194        // SAFETY: len > 0 means active_segment_index is valid
195        let segment_base = unsafe {
196            *self
197                .dynamic_segments
198                .get_unchecked(self.active_segment_index)
199        };
200        if self.write_ptr > segment_base {
201            self.len -= 1;
202            unsafe {
203                // Move back to the last written element
204                self.write_ptr = self.write_ptr.sub(1);
205                // Return the value
206                Some(std::ptr::read(self.write_ptr))
207            }
208        } else {
209            // Slow path: current segment doesn't have any element left, cross the boundary
210            self.pop_slow_path()
211        }
212    }
213
214    #[cold]
215    #[inline(never)]
216    fn pop_slow_path(&mut self) -> Option<T> {
217        // Safe because caller checked len > 0, and we're at segment boundary,
218        // so active_segment_index must be > 0
219        self.active_segment_index -= 1;
220        let idx = self.active_segment_index;
221
222        let base = unsafe { *self.dynamic_segments.get_unchecked(idx) };
223        let capacity = Self::MIN_NON_ZERO_CAP << idx;
224
225        self.segment_end = unsafe { base.add(capacity) };
226        self.write_ptr = unsafe { self.segment_end.sub(1) };
227        self.len -= 1;
228        Some(unsafe { std::ptr::read(self.write_ptr) })
229    }
230
231    /// Returns a reference to the element at the given index.
232    ///
233    /// Returns `None` if the index is out of bounds.
234    #[inline]
235    pub fn get(&self, index: usize) -> Option<&T> {
236        if index < self.len {
237            Some(unsafe { self.unchecked_at(index) })
238        } else {
239            None
240        }
241    }
242
243    /// Returns a mutable reference to the element at the given index.
244    ///
245    /// Returns `None` if the index is out of bounds.
246    #[inline]
247    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
248        if index < self.len {
249            Some(unsafe { self.unchecked_at_mut(index) })
250        } else {
251            None
252        }
253    }
254
255    /// Returns a reference to the first element, or `None` if empty.
256    #[inline]
257    pub fn first(&self) -> Option<&T> {
258        if self.len == 0 {
259            None
260        } else {
261            // SAFETY: len > 0 means segment 0 exists with at least one element
262            Some(unsafe { &*self.dynamic_segments[0] })
263        }
264    }
265
266    /// Returns a mutable reference to the first element, or `None` if empty.
267    #[inline]
268    pub fn first_mut(&mut self) -> Option<&mut T> {
269        if self.len == 0 {
270            None
271        } else {
272            // SAFETY: len > 0 means segment 0 exists with at least one element
273            Some(unsafe { &mut *self.dynamic_segments[0] })
274        }
275    }
276
277    /// Returns a reference to the last element, or `None` if empty.
278    #[inline]
279    pub fn last(&self) -> Option<&T> {
280        if self.len == 0 {
281            return None;
282        }
283
284        // Fast path: check if current segment has elements
285        // SAFETY: len > 0 means active_segment_index is valid
286        let segment_base = unsafe {
287            *self
288                .dynamic_segments
289                .get_unchecked(self.active_segment_index)
290        };
291
292        if self.write_ptr > segment_base {
293            // Last element is in the current segment
294            Some(unsafe { &*self.write_ptr.sub(1) })
295        } else {
296            // Current segment is empty, last element is in previous segment
297            self.get(self.len - 1)
298        }
299    }
300
301    /// Returns a mutable reference to the last element, or `None` if empty.
302    #[inline]
303    pub fn last_mut(&mut self) -> Option<&mut T> {
304        if self.len == 0 {
305            return None;
306        }
307
308        // Fast path: check if current segment has elements
309        // SAFETY: len > 0 means active_segment_index is valid
310        let segment_base = unsafe {
311            *self
312                .dynamic_segments
313                .get_unchecked(self.active_segment_index)
314        };
315
316        if self.write_ptr > segment_base {
317            // Last element is in the current segment
318            Some(unsafe { &mut *self.write_ptr.sub(1) })
319        } else {
320            // Current segment is empty, last element is in previous segment
321            self.get_mut(self.len - 1)
322        }
323    }
324
325    /// Returns `true` if the slice contains an element with the given value.
326    #[inline]
327    pub fn contains(&self, x: &T) -> bool
328    where
329        T: PartialEq,
330    {
331        self.as_slice().contains(x)
332    }
333
334    /// Returns `true` if `needle` is a prefix of the vector.
335    pub fn starts_with(&self, needle: &[T]) -> bool
336    where
337        T: PartialEq,
338    {
339        self.as_slice().starts_with(needle)
340    }
341
342    /// Returns `true` if `needle` is a suffix of the vector.
343    pub fn ends_with(&self, needle: &[T]) -> bool
344    where
345        T: PartialEq,
346    {
347        self.as_slice().ends_with(needle)
348    }
349
350    /// Binary searches this vector for a given element.
351    ///
352    /// If the value is found, returns `Ok(index)`. If not found, returns
353    /// `Err(index)` where `index` is the position where the element could be inserted.
354    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
355    where
356        T: Ord,
357    {
358        self.as_slice().binary_search(x)
359    }
360
361    /// Binary searches this vector with a comparator function.
362    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
363    where
364        F: FnMut(&T) -> Ordering,
365    {
366        self.as_slice().binary_search_by(f)
367    }
368
369    /// Binary searches this vector with a key extraction function.
370    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
371    where
372        F: FnMut(&T) -> B,
373        B: Ord,
374    {
375        self.as_slice().binary_search_by_key(b, f)
376    }
377
378    /// Swaps two elements in the vector.
379    ///
380    /// # Panics
381    ///
382    /// Panics if `a` or `b` are out of bounds.
383    #[inline]
384    pub fn swap(&mut self, a: usize, b: usize) {
385        self.as_mut_slice().swap(a, b)
386    }
387
388    /// Reverses the order of elements in the vector.
389    pub fn reverse(&mut self) {
390        self.as_mut_slice().reverse()
391    }
392
393    /// Fills the vector with the given value.
394    pub fn fill(&mut self, value: T)
395    where
396        T: Clone,
397    {
398        self.as_mut_slice().fill(value)
399    }
400
401    /// Fills the vector with values produced by a function.
402    pub fn fill_with<F>(&mut self, f: F)
403    where
404        F: FnMut() -> T,
405    {
406        self.as_mut_slice().fill_with(f)
407    }
408
409    /// Clears the vector, removing all elements.
410    ///
411    /// This does not deallocate the dynamic segments.
412    pub fn clear(&mut self) {
413        if self.len == 0 {
414            return;
415        }
416
417        if std::mem::needs_drop::<T>() {
418            // Drop elements in dynamic segments
419            let mut remaining = self.len;
420            for shelf in 0..self.segment_count {
421                let size = Self::shelf_size(shelf as u32);
422                let count = remaining.min(size);
423                let ptr = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
424                unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
425                remaining -= count;
426                if remaining == 0 {
427                    break;
428                }
429            }
430        }
431
432        self.len = 0;
433        // Reset write pointer cache
434        self.write_ptr = std::ptr::null_mut();
435        self.segment_end = std::ptr::null_mut();
436        self.active_segment_index = usize::MAX;
437    }
438
439    /// Shortens the vector, keeping the first `new_len` elements.
440    ///
441    /// If `new_len` is greater than or equal to the current length, this has no effect.
442    pub fn truncate(&mut self, new_len: usize) {
443        if new_len >= self.len {
444            return;
445        }
446
447        if std::mem::needs_drop::<T>() {
448            let dynamic_new_len = new_len;
449            let dynamic_old_len = self.len;
450
451            if dynamic_new_len < dynamic_old_len {
452                let mut pos = 0usize;
453                for shelf in 0..self.segment_count {
454                    let size = Self::shelf_size(shelf as u32);
455                    let seg_end = pos + size;
456
457                    // Calculate overlap with [dynamic_new_len, dynamic_old_len)
458                    let drop_start = dynamic_new_len.max(pos);
459                    let drop_end = dynamic_old_len.min(seg_end);
460
461                    if drop_start < drop_end {
462                        let offset = drop_start - pos;
463                        let count = drop_end - drop_start;
464                        let ptr =
465                            unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(offset) };
466                        unsafe {
467                            std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
468                        };
469                    }
470
471                    pos = seg_end;
472                    if pos >= dynamic_old_len {
473                        break;
474                    }
475                }
476            }
477        }
478
479        self.len = new_len;
480        // Update write pointer cache
481        if new_len > 0 {
482            let biased = new_len + Self::MIN_NON_ZERO_CAP;
483            let msb = biased.ilog2();
484            let idx = (msb - Self::MIN_CAP_EXP) as usize;
485            self.active_segment_index = idx;
486
487            let capacity = 1usize << msb;
488            let offset = biased ^ capacity;
489            unsafe {
490                let base = *self.dynamic_segments.get_unchecked(idx);
491                self.write_ptr = base.add(offset);
492                self.segment_end = base.add(capacity);
493            }
494        } else {
495            // Reset to "Genesis" state
496            self.write_ptr = std::ptr::null_mut();
497            self.segment_end = std::ptr::null_mut();
498            // Set to -1 so the next push wraps to 0
499            self.active_segment_index = usize::MAX;
500        }
501    }
502
503    /// Reserves capacity for at least `additional` more elements.
504    pub fn reserve(&mut self, additional: usize) {
505        self.grow_capacity(self.len + additional);
506        // Initialize write pointer if we didn't have capacity before
507        if self.write_ptr.is_null() {
508            unsafe {
509                // We just guaranteed segment 0 exists in the loop above.
510                let base = *self.dynamic_segments.get_unchecked(0);
511                self.write_ptr = base;
512                self.segment_end = base.add(Self::MIN_NON_ZERO_CAP);
513
514                // Set to 0 so we are valid.
515                // Note: push_slow expects MAX->0 transition, but since we set
516                // write_ptr here, the first push will hit the FAST path (write_ptr < end)
517                // and skip push_slow entirely.
518                self.active_segment_index = 0;
519            }
520        }
521    }
522
523    /// Shrinks the capacity to match the current length.
524    ///
525    /// This deallocates unused dynamic segments.
526    pub fn shrink_to_fit(&mut self) {
527        self.shrink_capacity(self.len);
528    }
529
530    /// Returns an iterator over references to the elements.
531    #[inline]
532    pub fn iter(&self) -> Iter<'_, T> {
533        // Initialize with null pointers - first next() call will set up the segment
534        Iter {
535            vec: self,
536            ptr: std::ptr::null(),
537            segment_end: std::ptr::null(),
538            index: 0,
539            shelf_index: 0,
540        }
541    }
542
543    /// Returns an iterator over mutable references to the elements.
544    #[inline]
545    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
546        // Initialize with null pointers - first next() call will set up the segment
547        IterMut {
548            vec: self,
549            ptr: std::ptr::null_mut(),
550            segment_end: std::ptr::null_mut(),
551            index: 0,
552            shelf_index: 0,
553        }
554    }
555
556    /// Returns an immutable slice view of the entire vector.
557    ///
558    /// # Example
559    ///
560    /// ```
561    /// use segmented_vec::SegmentedVec;
562    ///
563    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
564    /// vec.extend(0..10);
565    ///
566    /// let slice = vec.as_slice();
567    /// assert_eq!(slice.len(), 10);
568    /// assert_eq!(slice[0], 0);
569    /// ```
570    #[inline]
571    pub fn as_slice(&self) -> SegmentedSlice<'_, T> {
572        SegmentedSlice::new(self)
573    }
574
575    /// Returns a mutable slice view of the entire vector.
576    ///
577    /// # Example
578    ///
579    /// ```
580    /// use segmented_vec::SegmentedVec;
581    ///
582    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
583    /// vec.extend(0..10);
584    ///
585    /// let mut slice = vec.as_mut_slice();
586    /// slice[0] = 100;
587    /// assert_eq!(vec[0], 100);
588    /// ```
589    #[inline]
590    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T> {
591        SegmentedSliceMut::new(self)
592    }
593
594    /// Returns an immutable slice view of a range of the vector.
595    ///
596    /// # Panics
597    ///
598    /// Panics if the range is out of bounds.
599    ///
600    /// # Example
601    ///
602    /// ```
603    /// use segmented_vec::SegmentedVec;
604    ///
605    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
606    /// vec.extend(0..10);
607    ///
608    /// let slice = vec.slice(2..5);
609    /// assert_eq!(slice.len(), 3);
610    /// assert_eq!(slice[0], 2);
611    /// ```
612    #[inline]
613    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T> {
614        assert!(range.start <= range.end && range.end <= self.len);
615        SegmentedSlice::from_range(self, range.start, range.end)
616    }
617
618    /// Returns a mutable slice view of a range of the vector.
619    ///
620    /// # Panics
621    ///
622    /// Panics if the range is out of bounds.
623    ///
624    /// # Example
625    ///
626    /// ```
627    /// use segmented_vec::SegmentedVec;
628    ///
629    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
630    /// vec.extend(0..10);
631    ///
632    /// let mut slice = vec.slice_mut(2..5);
633    /// slice[0] = 100;
634    /// assert_eq!(vec[2], 100);
635    /// ```
636    #[inline]
637    pub fn slice_mut(&mut self, range: std::ops::Range<usize>) -> SegmentedSliceMut<'_, T> {
638        assert!(range.start <= range.end && range.end <= self.len);
639        SegmentedSliceMut::from_range(self, range.start, range.end)
640    }
641
642    /// Extends the vector by cloning elements from a slice.
643    pub fn extend_from_slice(&mut self, other: &[T])
644    where
645        T: Clone,
646    {
647        if other.is_empty() {
648            return;
649        }
650        self.reserve(other.len());
651
652        let mut src = other;
653
654        // Fill dynamic segments
655        while !src.is_empty() {
656            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
657            let to_copy = src.len().min(remaining);
658            let dest = unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) };
659            for (i, item) in src.iter().take(to_copy).enumerate() {
660                unsafe { std::ptr::write(dest.add(i), item.clone()) };
661            }
662            self.len += to_copy;
663            src = &src[to_copy..];
664        }
665
666        // Set up write pointer cache for next push
667        if self.len > 0 {
668            let biased = self.len + Self::MIN_NON_ZERO_CAP;
669            let msb = biased.ilog2();
670            let idx = (msb - Self::MIN_CAP_EXP) as usize;
671            self.active_segment_index = idx;
672
673            let capacity = 1usize << msb;
674            let offset = biased ^ capacity;
675            unsafe {
676                let base = *self.dynamic_segments.get_unchecked(idx);
677                self.write_ptr = base.add(offset);
678                self.segment_end = base.add(capacity);
679            }
680        }
681    }
682
683    /// Extends the vector by copying elements from a slice (for `Copy` types).
684    ///
685    /// This is more efficient than `extend_from_slice` for `Copy` types
686    /// as it uses bulk memory copy operations.
687    pub fn extend_from_copy_slice(&mut self, other: &[T])
688    where
689        T: Copy,
690    {
691        if other.is_empty() {
692            return;
693        }
694        self.reserve(other.len());
695
696        let mut src = other;
697
698        // Fill dynamic segments
699        while !src.is_empty() {
700            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
701            let to_copy = src.len().min(remaining);
702            unsafe {
703                let dest = (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx);
704                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
705            };
706            self.len += to_copy;
707            src = &src[to_copy..];
708        }
709
710        // Set up write pointer cache for next push
711        if self.len > 0 {
712            let biased = self.len + Self::MIN_NON_ZERO_CAP;
713            let msb = biased.ilog2();
714            let idx = (msb - Self::MIN_CAP_EXP) as usize;
715            self.active_segment_index = idx;
716
717            let capacity = 1usize << msb;
718            let offset = biased ^ capacity;
719            unsafe {
720                let base = *self.dynamic_segments.get_unchecked(idx);
721                self.write_ptr = base.add(offset);
722                self.segment_end = base.add(capacity);
723            }
724        }
725    }
726
727    /// Sorts the vector in place using a stable sorting algorithm.
728    ///
729    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
730    ///
731    /// The algorithm is a merge sort adapted from the Rust standard library.
732    ///
733    /// # Examples
734    ///
735    /// ```
736    /// use segmented_vec::SegmentedVec;
737    ///
738    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
739    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
740    /// vec.sort();
741    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
742    /// ```
743    #[inline]
744    pub fn sort(&mut self)
745    where
746        T: Ord,
747    {
748        self.as_mut_slice().sort();
749    }
750
751    /// Sorts the vector in place with a comparator function.
752    ///
753    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
754    ///
755    /// The comparator function must define a total ordering for the elements.
756    ///
757    /// # Examples
758    ///
759    /// ```
760    /// use segmented_vec::SegmentedVec;
761    ///
762    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
763    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
764    /// vec.sort_by(|a, b| b.cmp(a)); // reverse order
765    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
766    /// ```
767    pub fn sort_by<F>(&mut self, compare: F)
768    where
769        F: FnMut(&T, &T) -> Ordering,
770    {
771        self.as_mut_slice().sort_by(compare);
772    }
773
774    /// Sorts the vector in place with a key extraction function.
775    ///
776    /// This sort is stable (i.e., does not reorder equal elements) and O(m * n * log(n)) worst-case,
777    /// where the key function is O(m).
778    ///
779    /// # Examples
780    ///
781    /// ```
782    /// use segmented_vec::SegmentedVec;
783    ///
784    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
785    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
786    /// vec.sort_by_key(|k| k.abs());
787    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, -3, -4, 5, 6, -9]);
788    /// ```
789    #[inline]
790    pub fn sort_by_key<K, F>(&mut self, f: F)
791    where
792        F: FnMut(&T) -> K,
793        K: Ord,
794    {
795        self.as_mut_slice().sort_by_key(f);
796    }
797
798    /// Sorts the vector in place using an unstable sorting algorithm.
799    ///
800    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
801    ///
802    /// The algorithm is a quicksort with heapsort fallback, adapted from the Rust standard library.
803    ///
804    /// # Examples
805    ///
806    /// ```
807    /// use segmented_vec::SegmentedVec;
808    ///
809    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
810    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
811    /// vec.sort_unstable();
812    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
813    /// ```
814    #[inline]
815    pub fn sort_unstable(&mut self)
816    where
817        T: Ord,
818    {
819        self.as_mut_slice().sort_unstable();
820    }
821
822    /// Sorts the vector in place with a comparator function using an unstable sorting algorithm.
823    ///
824    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
825    ///
826    /// The comparator function must define a total ordering for the elements.
827    ///
828    /// # Examples
829    ///
830    /// ```
831    /// use segmented_vec::SegmentedVec;
832    ///
833    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
834    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
835    /// vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
836    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
837    /// ```
838    pub fn sort_unstable_by<F>(&mut self, compare: F)
839    where
840        F: FnMut(&T, &T) -> Ordering,
841    {
842        self.as_mut_slice().sort_unstable_by(compare);
843    }
844
845    /// Sorts the vector in place with a key extraction function using an unstable sorting algorithm.
846    ///
847    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case,
848    /// where the key function is O(m).
849    ///
850    /// # Examples
851    ///
852    /// ```
853    /// use segmented_vec::SegmentedVec;
854    ///
855    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
856    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
857    /// vec.sort_unstable_by_key(|k| k.abs());
858    /// // Note: unstable sort may reorder equal elements, so we just check it's sorted by abs value
859    /// let result: Vec<i32> = vec.iter().copied().collect();
860    /// for i in 1..result.len() {
861    ///     assert!(result[i-1].abs() <= result[i].abs());
862    /// }
863    /// ```
864    #[inline]
865    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
866    where
867        F: FnMut(&T) -> K,
868        K: Ord,
869    {
870        self.as_mut_slice().sort_unstable_by_key(f);
871    }
872
873    /// Checks if the elements of this vector are sorted.
874    pub fn is_sorted(&self) -> bool
875    where
876        T: PartialOrd,
877    {
878        self.as_slice().is_sorted()
879    }
880
881    /// Checks if the elements of this vector are sorted using the given comparator function.
882    pub fn is_sorted_by<F>(&self, compare: F) -> bool
883    where
884        F: FnMut(&T, &T) -> bool,
885    {
886        self.as_slice().is_sorted_by(compare)
887    }
888
889    /// Checks if the elements of this vector are sorted using the given key extraction function.
890    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
891    where
892        F: FnMut(&T) -> K,
893        K: PartialOrd,
894    {
895        self.as_slice().is_sorted_by_key(f)
896    }
897
898    /// Returns the index of the partition point according to the given predicate.
899    pub fn partition_point<P>(&self, pred: P) -> usize
900    where
901        P: FnMut(&T) -> bool,
902    {
903        self.as_slice().partition_point(pred)
904    }
905
906    /// Rotates the vector in-place such that the first `mid` elements move to the end.
907    pub fn rotate_left(&mut self, mid: usize) {
908        self.as_mut_slice().rotate_left(mid);
909    }
910
911    /// Rotates the vector in-place such that the last `k` elements move to the front.
912    pub fn rotate_right(&mut self, k: usize) {
913        self.as_mut_slice().rotate_right(k);
914    }
915
916    /// Creates a new `SegmentedVec` with at least the specified capacity.
917    pub fn with_capacity(capacity: usize) -> Self {
918        let mut vec = Self::new();
919        vec.reserve(capacity);
920        vec
921    }
922
923    /// Inserts an element at position `index`, shifting all elements after it to the right.
924    ///
925    /// # Panics
926    ///
927    /// Panics if `index > len`.
928    pub fn insert(&mut self, index: usize, element: T) {
929        assert!(index <= self.len);
930        self.push(element);
931        // Rotate the new element into place
932        if index < self.len - 1 {
933            for i in (index..self.len - 1).rev() {
934                unsafe {
935                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
936                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
937                    std::ptr::swap(ptr_a, ptr_b);
938                }
939            }
940        }
941    }
942
943    /// Removes and returns the element at position `index`, shifting all elements after it to the left.
944    ///
945    /// # Panics
946    ///
947    /// Panics if `index >= len`.
948    pub fn remove(&mut self, index: usize) -> T {
949        assert!(index < self.len);
950        // Shift elements left
951        for i in index..self.len - 1 {
952            unsafe {
953                let ptr_a = self.unchecked_at_mut(i) as *mut T;
954                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
955                std::ptr::swap(ptr_a, ptr_b);
956            }
957        }
958        self.pop().unwrap()
959    }
960
961    /// Removes an element from the vector and returns it.
962    ///
963    /// The removed element is replaced by the last element of the vector.
964    /// This does not preserve ordering, but is O(1).
965    ///
966    /// # Panics
967    ///
968    /// Panics if `index >= len`.
969    pub fn swap_remove(&mut self, index: usize) -> T {
970        assert!(index < self.len);
971        let last_index = self.len - 1;
972        if index != last_index {
973            unsafe {
974                let ptr_a = self.unchecked_at_mut(index) as *mut T;
975                // Can't use write_ptr.sub(1) here because write_ptr might be at
976                // a segment boundary (start of segment), in which case the last
977                // element is in the previous segment.
978                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
979                std::ptr::swap(ptr_a, ptr_b);
980            }
981        }
982        self.pop().unwrap()
983    }
984
985    /// Retains only the elements specified by the predicate.
986    ///
987    /// Removes all elements for which `f(&element)` returns `false`.
988    pub fn retain<F>(&mut self, mut f: F)
989    where
990        F: FnMut(&T) -> bool,
991    {
992        let mut i = 0;
993        while i < self.len {
994            if f(unsafe { self.unchecked_at(i) }) {
995                i += 1;
996            } else {
997                self.remove(i);
998            }
999        }
1000    }
1001
1002    /// Retains only the elements specified by the predicate, passing a mutable reference.
1003    pub fn retain_mut<F>(&mut self, mut f: F)
1004    where
1005        F: FnMut(&mut T) -> bool,
1006    {
1007        let mut i = 0;
1008        while i < self.len {
1009            if f(unsafe { self.unchecked_at_mut(i) }) {
1010                i += 1;
1011            } else {
1012                self.remove(i);
1013            }
1014        }
1015    }
1016
1017    /// Removes consecutive duplicate elements.
1018    ///
1019    /// If the vector is sorted, this removes all duplicates.
1020    pub fn dedup(&mut self)
1021    where
1022        T: PartialEq,
1023    {
1024        self.dedup_by(|a, b| a == b);
1025    }
1026
1027    /// Removes consecutive elements that satisfy the given equality relation.
1028    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1029    where
1030        F: FnMut(&mut T, &mut T) -> bool,
1031    {
1032        if self.len <= 1 {
1033            return;
1034        }
1035        let mut write = 1;
1036        for read in 1..self.len {
1037            let should_keep = unsafe {
1038                let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1039                let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1040                !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1041            };
1042            if should_keep {
1043                if read != write {
1044                    unsafe {
1045                        let ptr_src = self.unchecked_at_mut(read) as *mut T;
1046                        let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1047                        std::ptr::swap(ptr_dst, ptr_src);
1048                    }
1049                }
1050                write += 1;
1051            } else {
1052                // Drop the duplicate
1053                unsafe {
1054                    std::ptr::drop_in_place(self.unchecked_at_mut(read));
1055                }
1056            }
1057        }
1058        self.len = write;
1059    }
1060
1061    /// Removes consecutive elements that map to the same key.
1062    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
1063    where
1064        F: FnMut(&mut T) -> K,
1065        K: PartialEq,
1066    {
1067        self.dedup_by(|a, b| key(a) == key(b));
1068    }
1069
1070    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1071    ///
1072    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1073    /// with each additional slot filled with `value`.
1074    /// If `new_len` is less than `len`, the vector is simply truncated.
1075    pub fn resize(&mut self, new_len: usize, value: T)
1076    where
1077        T: Clone,
1078    {
1079        if new_len > self.len {
1080            self.reserve(new_len - self.len);
1081            while self.len < new_len {
1082                self.push(value.clone());
1083            }
1084        } else {
1085            self.truncate(new_len);
1086        }
1087    }
1088
1089    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1090    ///
1091    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1092    /// with each additional slot filled with the result of calling the closure `f`.
1093    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
1094    where
1095        F: FnMut() -> T,
1096    {
1097        if new_len > self.len {
1098            self.reserve(new_len - self.len);
1099            while self.len < new_len {
1100                self.push(f());
1101            }
1102        } else {
1103            self.truncate(new_len);
1104        }
1105    }
1106
1107    /// Moves all the elements of `other` into `self`, leaving `other` empty.
1108    pub fn append(&mut self, other: &mut Self) {
1109        let other_len = other.len;
1110        self.reserve(other_len);
1111        let start = self.len;
1112        while let Some(item) = other.pop() {
1113            self.push(item);
1114        }
1115        // Reverse the appended portion since pop() returns in reverse order
1116        let mut left = start;
1117        let mut right = self.len;
1118        while left < right {
1119            right -= 1;
1120            if left < right {
1121                unsafe {
1122                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
1123                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
1124                    std::ptr::swap(ptr_a, ptr_b);
1125                }
1126                left += 1;
1127            }
1128        }
1129    }
1130
1131    /// Splits the vector into two at the given index.
1132    ///
1133    /// Returns a newly allocated vector containing the elements in the range `[at, len)`.
1134    /// After the call, the original vector will contain elements `[0, at)`.
1135    ///
1136    /// # Panics
1137    ///
1138    /// Panics if `at > len`.
1139    pub fn split_off(&mut self, at: usize) -> Self {
1140        assert!(at <= self.len);
1141        let mut other = Self::new();
1142        other.reserve(self.len - at);
1143        for i in at..self.len {
1144            other.push(unsafe { self.unchecked_read(i) });
1145        }
1146        self.len = at;
1147        other
1148    }
1149
1150    /// Returns an iterator over `chunk_size` elements of the vector at a time.
1151    ///
1152    /// # Panics
1153    ///
1154    /// Panics if `chunk_size` is 0.
1155    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T> {
1156        self.as_slice().chunks(chunk_size)
1157    }
1158
1159    /// Returns an iterator over all contiguous windows of length `size`.
1160    ///
1161    /// # Panics
1162    ///
1163    /// Panics if `size` is 0.
1164    pub fn windows(&self, size: usize) -> Windows<'_, T> {
1165        self.as_slice().windows(size)
1166    }
1167
1168    /// Returns an iterator over `chunk_size` elements at a time, starting from the end.
1169    ///
1170    /// # Panics
1171    ///
1172    /// Panics if `chunk_size` is 0.
1173    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T> {
1174        self.as_slice().rchunks(chunk_size)
1175    }
1176
1177    /// Returns an iterator over exactly `chunk_size` elements at a time.
1178    ///
1179    /// # Panics
1180    ///
1181    /// Panics if `chunk_size` is 0.
1182    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T> {
1183        self.as_slice().chunks_exact(chunk_size)
1184    }
1185
1186    /// Creates a draining iterator that removes the specified range and yields the removed items.
1187    ///
1188    /// # Panics
1189    ///
1190    /// Panics if the starting point is greater than the end point or if the end point is greater than the length.
1191    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T> {
1192        assert!(range.start <= range.end && range.end <= self.len);
1193        Drain {
1194            vec: self,
1195            range_start: range.start,
1196            range_end: range.end,
1197            index: range.start,
1198        }
1199    }
1200
1201    /// Copies the elements to a new `Vec<T>`.
1202    pub fn to_vec(&self) -> Vec<T>
1203    where
1204        T: Clone,
1205    {
1206        self.as_slice().to_vec()
1207    }
1208
1209    // --- Internal helper methods ---
1210
1211    /// Calculate the number of shelves needed for a given capacity.
1212    #[inline]
1213    fn shelf_count(box_count: usize) -> u32 {
1214        if box_count == 0 {
1215            return 0;
1216        }
1217        let val = box_count + Self::MIN_NON_ZERO_CAP;
1218        val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
1219    }
1220
1221    /// Calculate the size of a shelf at a given index.
1222    #[inline]
1223    fn shelf_size(shelf_index: u32) -> usize {
1224        Self::MIN_NON_ZERO_CAP << shelf_index
1225    }
1226
1227    /// Calculate which shelf and box index a list index falls into.
1228    /// Returns (shelf_index, box_index).
1229    #[inline]
1230    fn location(list_index: usize) -> (usize, usize) {
1231        let biased = list_index + Self::MIN_NON_ZERO_CAP;
1232        let msb = biased.ilog2();
1233        let shelf = msb - Self::MIN_CAP_EXP;
1234        // Clear the most significant bit to get box_index
1235        let box_idx = biased ^ (1usize << msb);
1236        (shelf as usize, box_idx)
1237    }
1238
1239    /// Calculate shelf, box index, and remaining capacity in one go.
1240    /// Returns (shelf_index, box_index, segment_remaining).
1241    #[inline]
1242    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
1243        let biased = list_index + Self::MIN_NON_ZERO_CAP;
1244        let msb = biased.ilog2();
1245        let shelf = msb - Self::MIN_CAP_EXP;
1246        let box_idx = biased ^ (1usize << msb);
1247        // segment_remaining = shelf_size - box_idx = (1 << msb) - box_idx
1248        //                   = (1 << msb) - (biased - (1 << msb))
1249        //                   = (2 << msb) - biased
1250        let segment_remaining = (2usize << msb) - biased;
1251        (shelf as usize, box_idx, segment_remaining)
1252    }
1253
1254    /// Get an unchecked reference to an element.
1255    ///
1256    /// # Safety
1257    ///
1258    /// `index` must be less than `self.len`.
1259    #[inline]
1260    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
1261        unsafe {
1262            let (shelf, box_idx) = Self::location(index);
1263            &*(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
1264        }
1265    }
1266
1267    /// Get an unchecked mutable reference to an element.
1268    ///
1269    /// # Safety
1270    ///
1271    /// `index` must be less than `self.len`.
1272    #[inline]
1273    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
1274        unsafe {
1275            let (shelf, box_idx) = Self::location(index);
1276            &mut *(*self.dynamic_segments.get_unchecked(shelf)).add(box_idx)
1277        }
1278    }
1279
1280    /// Read a value from an index.
1281    ///
1282    /// # Safety
1283    ///
1284    /// `index` must be less than `self.len`, and the value must not be read again.
1285    #[inline]
1286    unsafe fn unchecked_read(&self, index: usize) -> T {
1287        unsafe {
1288            let (shelf, box_idx) = Self::location(index);
1289            std::ptr::read((*self.dynamic_segments.get_unchecked(shelf)).add(box_idx))
1290        }
1291    }
1292
1293    fn grow_once(&mut self) {
1294        assert!(
1295            self.segment_count < MAX_SEGMENTS,
1296            "Maximum segment count exceeded"
1297        );
1298
1299        let size = Self::shelf_size(self.segment_count as u32);
1300        let layout = Layout::array::<T>(size).expect("Layout overflow");
1301        let ptr = if layout.size() == 0 {
1302            std::ptr::dangling_mut::<T>()
1303        } else {
1304            let ptr = unsafe { alloc::alloc(layout) };
1305            if ptr.is_null() {
1306                panic!("Allocation failed");
1307            }
1308            ptr as *mut T
1309        };
1310        self.dynamic_segments[self.segment_count] = ptr;
1311        self.segment_count += 1;
1312    }
1313
1314    /// Grow capacity to accommodate at least `new_capacity` elements.
1315    fn grow_capacity(&mut self, new_capacity: usize) {
1316        let new_shelf_count = Self::shelf_count(new_capacity) as usize;
1317        let old_shelf_count = self.segment_count;
1318
1319        if new_shelf_count > old_shelf_count {
1320            assert!(
1321                new_shelf_count <= MAX_SEGMENTS,
1322                "Maximum segment count exceeded"
1323            );
1324
1325            for i in old_shelf_count..new_shelf_count {
1326                let size = Self::shelf_size(i as u32);
1327                let layout = Layout::array::<T>(size).expect("Layout overflow");
1328                let ptr = if layout.size() == 0 {
1329                    std::ptr::dangling_mut::<T>()
1330                } else {
1331                    let ptr = unsafe { alloc::alloc(layout) };
1332                    if ptr.is_null() {
1333                        panic!("Allocation failed");
1334                    }
1335                    ptr as *mut T
1336                };
1337                self.dynamic_segments[i] = ptr;
1338            }
1339            self.segment_count = new_shelf_count;
1340        }
1341    }
1342
1343    /// Compute total capacity given the number of dynamic shelves.
1344    #[inline]
1345    fn compute_capacity(shelf_count: u32) -> usize {
1346        (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
1347    }
1348
1349    /// Shrink capacity to the minimum needed for `new_capacity` elements.
1350    fn shrink_capacity(&mut self, new_capacity: usize) {
1351        let new_shelf_count = Self::shelf_count(new_capacity);
1352        let old_shelf_count = self.segment_count as u32;
1353
1354        if new_shelf_count < old_shelf_count {
1355            self.free_shelves(old_shelf_count, new_shelf_count);
1356            self.segment_count = new_shelf_count as usize;
1357        }
1358    }
1359
1360    /// Free shelves from `from_count` down to `to_count` (exclusive).
1361    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
1362        for i in (to_count..from_count).rev() {
1363            let size = Self::shelf_size(i);
1364            let layout = Layout::array::<T>(size).expect("Layout overflow");
1365            if layout.size() > 0 {
1366                unsafe {
1367                    alloc::dealloc(self.dynamic_segments[i as usize] as *mut u8, layout);
1368                }
1369            }
1370            self.dynamic_segments[i as usize] = std::ptr::null_mut();
1371        }
1372    }
1373}
1374
1375impl<T> Drop for SegmentedVec<T> {
1376    fn drop(&mut self) {
1377        // Drop all elements
1378        self.clear();
1379        // Free all dynamic segments
1380        self.free_shelves(self.segment_count as u32, 0);
1381    }
1382}
1383
1384impl<T> sort::IndexedAccess<T> for SegmentedVec<T> {
1385    #[inline]
1386    fn get_ref(&self, index: usize) -> &T {
1387        debug_assert!(index < self.len);
1388        unsafe { self.unchecked_at(index) }
1389    }
1390
1391    #[inline]
1392    fn get_ptr(&self, index: usize) -> *const T {
1393        debug_assert!(index < self.len);
1394        let (shelf, box_idx) = Self::location(index);
1395        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
1396    }
1397
1398    #[inline]
1399    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
1400        debug_assert!(index < self.len);
1401        let (shelf, box_idx) = Self::location(index);
1402        unsafe { (*self.dynamic_segments.get_unchecked(shelf)).add(box_idx) }
1403    }
1404
1405    #[inline]
1406    fn swap(&mut self, a: usize, b: usize) {
1407        if a == b {
1408            return;
1409        }
1410        debug_assert!(a < self.len && b < self.len);
1411        unsafe {
1412            let ptr_a = self.get_ptr_mut(a);
1413            let ptr_b = self.get_ptr_mut(b);
1414            std::ptr::swap(ptr_a, ptr_b);
1415        }
1416    }
1417}
1418
1419impl<T> Default for SegmentedVec<T> {
1420    fn default() -> Self {
1421        Self::new()
1422    }
1423}
1424
1425impl<T> Index<usize> for SegmentedVec<T> {
1426    type Output = T;
1427
1428    fn index(&self, index: usize) -> &Self::Output {
1429        self.get(index).expect("index out of bounds")
1430    }
1431}
1432
1433impl<T> IndexMut<usize> for SegmentedVec<T> {
1434    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
1435        self.get_mut(index).expect("index out of bounds")
1436    }
1437}
1438
1439impl<T: Clone> Clone for SegmentedVec<T> {
1440    fn clone(&self) -> Self {
1441        if self.len == 0 {
1442            return Self::new();
1443        }
1444
1445        let mut new_vec = Self::new();
1446        new_vec.reserve(self.len);
1447
1448        let mut remaining = self.len;
1449        for shelf in 0..self.segment_count {
1450            let size = Self::shelf_size(shelf as u32);
1451            let count = remaining.min(size);
1452            let src = unsafe { *self.dynamic_segments.get_unchecked(shelf) };
1453            let dst = unsafe { *new_vec.dynamic_segments.get_unchecked(shelf) };
1454            for i in 0..count {
1455                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1456            }
1457            new_vec.len += count;
1458            remaining -= count;
1459            if remaining == 0 {
1460                break;
1461            }
1462        }
1463
1464        // Set up write pointer
1465        if new_vec.len > 0 {
1466            let biased = new_vec.len + Self::MIN_NON_ZERO_CAP;
1467            let msb = biased.ilog2();
1468            let idx = (msb - Self::MIN_CAP_EXP) as usize;
1469            new_vec.active_segment_index = idx;
1470
1471            let capacity = 1usize << msb;
1472            let offset = biased ^ capacity;
1473            unsafe {
1474                let base = *new_vec.dynamic_segments.get_unchecked(idx);
1475                new_vec.write_ptr = base.add(offset);
1476                new_vec.segment_end = base.add(capacity);
1477            }
1478        }
1479
1480        new_vec
1481    }
1482}
1483
1484impl<T: std::fmt::Debug> std::fmt::Debug for SegmentedVec<T> {
1485    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1486        f.debug_list().entries(self.iter()).finish()
1487    }
1488}
1489
1490impl<T: PartialEq> PartialEq for SegmentedVec<T> {
1491    fn eq(&self, other: &Self) -> bool {
1492        if self.len != other.len {
1493            return false;
1494        }
1495        for i in 0..self.len {
1496            if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1497                return false;
1498            }
1499        }
1500        true
1501    }
1502}
1503
1504impl<T: Eq> Eq for SegmentedVec<T> {}
1505
1506impl<T> FromIterator<T> for SegmentedVec<T> {
1507    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1508        let iter = iter.into_iter();
1509        let (lower, _) = iter.size_hint();
1510        let mut vec = Self::new();
1511        vec.reserve(lower);
1512        for item in iter {
1513            vec.push(item);
1514        }
1515        vec
1516    }
1517}
1518
1519impl<T> Extend<T> for SegmentedVec<T> {
1520    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1521        let iter = iter.into_iter();
1522        let (lower, _) = iter.size_hint();
1523        self.reserve(lower);
1524        for item in iter {
1525            self.push(item);
1526        }
1527    }
1528}
1529
1530// --- Iterator implementations ---
1531
1532/// An iterator over references to elements of a `SegmentedVec`.
1533pub struct Iter<'a, T> {
1534    vec: &'a SegmentedVec<T>,
1535    /// Current pointer within segment
1536    ptr: *const T,
1537    /// End of current segment (min of segment capacity and vec.len)
1538    segment_end: *const T,
1539    /// Current logical index
1540    index: usize,
1541    /// Current shelf index (for dynamic segments)
1542    shelf_index: u32,
1543}
1544
1545impl<'a, T> Iterator for Iter<'a, T> {
1546    type Item = &'a T;
1547
1548    #[inline]
1549    fn next(&mut self) -> Option<Self::Item> {
1550        if self.ptr < self.segment_end {
1551            let result = unsafe { &*self.ptr };
1552            self.ptr = unsafe { self.ptr.add(1) };
1553            self.index += 1;
1554            return Some(result);
1555        }
1556        self.next_segment()
1557    }
1558
1559    #[inline]
1560    fn size_hint(&self) -> (usize, Option<usize>) {
1561        let remaining = self.vec.len.saturating_sub(self.index);
1562        (remaining, Some(remaining))
1563    }
1564}
1565
1566impl<'a, T> Iter<'a, T> {
1567    #[inline]
1568    fn next_segment(&mut self) -> Option<&'a T> {
1569        if self.index >= self.vec.len {
1570            return None;
1571        }
1572
1573        let shelf_idx = self.shelf_index as usize;
1574        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
1575        let ptr = self.vec.dynamic_segments[shelf_idx];
1576        let segment_len = shelf_size.min(self.vec.len - self.index);
1577        self.ptr = ptr;
1578        self.segment_end = unsafe { ptr.add(segment_len) };
1579        self.shelf_index += 1;
1580
1581        let result = unsafe { &*self.ptr };
1582        self.ptr = unsafe { self.ptr.add(1) };
1583        self.index += 1;
1584        Some(result)
1585    }
1586}
1587
1588impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
1589
1590/// An iterator over mutable references to elements of a `SegmentedVec`.
1591pub struct IterMut<'a, T> {
1592    vec: &'a mut SegmentedVec<T>,
1593    /// Current pointer within segment
1594    ptr: *mut T,
1595    /// End of current segment (min of segment capacity and vec.len)
1596    segment_end: *mut T,
1597    /// Current logical index
1598    index: usize,
1599    /// Current shelf index (for dynamic segments)
1600    shelf_index: u32,
1601}
1602
1603impl<'a, T> Iterator for IterMut<'a, T> {
1604    type Item = &'a mut T;
1605
1606    #[inline]
1607    fn next(&mut self) -> Option<Self::Item> {
1608        if self.ptr < self.segment_end {
1609            let result = self.ptr;
1610            self.ptr = unsafe { self.ptr.add(1) };
1611            self.index += 1;
1612            return Some(unsafe { &mut *result });
1613        }
1614        self.next_segment()
1615    }
1616
1617    #[inline]
1618    fn size_hint(&self) -> (usize, Option<usize>) {
1619        let remaining = self.vec.len.saturating_sub(self.index);
1620        (remaining, Some(remaining))
1621    }
1622}
1623
1624impl<'a, T> IterMut<'a, T> {
1625    #[inline]
1626    fn next_segment(&mut self) -> Option<&'a mut T> {
1627        if self.index >= self.vec.len {
1628            return None;
1629        }
1630
1631        let shelf_idx = self.shelf_index as usize;
1632        let shelf_size = SegmentedVec::<T>::shelf_size(self.shelf_index);
1633        let ptr = self.vec.dynamic_segments[shelf_idx];
1634        let segment_len = shelf_size.min(self.vec.len - self.index);
1635        self.ptr = ptr;
1636        self.segment_end = unsafe { ptr.add(segment_len) };
1637        self.shelf_index += 1;
1638
1639        let result = self.ptr;
1640        self.ptr = unsafe { self.ptr.add(1) };
1641        self.index += 1;
1642        Some(unsafe { &mut *result })
1643    }
1644}
1645
1646impl<'a, T> ExactSizeIterator for IterMut<'a, T> {}
1647
1648impl<T> IntoIterator for SegmentedVec<T> {
1649    type Item = T;
1650    type IntoIter = IntoIter<T>;
1651
1652    fn into_iter(self) -> Self::IntoIter {
1653        IntoIter {
1654            vec: self,
1655            index: 0,
1656        }
1657    }
1658}
1659
1660impl<'a, T> IntoIterator for &'a SegmentedVec<T> {
1661    type Item = &'a T;
1662    type IntoIter = Iter<'a, T>;
1663
1664    fn into_iter(self) -> Self::IntoIter {
1665        self.iter()
1666    }
1667}
1668
1669impl<'a, T> IntoIterator for &'a mut SegmentedVec<T> {
1670    type Item = &'a mut T;
1671    type IntoIter = IterMut<'a, T>;
1672
1673    fn into_iter(self) -> Self::IntoIter {
1674        self.iter_mut()
1675    }
1676}
1677
1678/// An owning iterator over elements of a `SegmentedVec`.
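///
/// Created by the `into_iter` method on `SegmentedVec` (provided by the
/// `IntoIterator` trait). A short illustrative example:
///
/// ```
/// use segmented_vec::SegmentedVec;
///
/// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
/// for i in 0..3 {
///     vec.push(i);
/// }
///
/// // Consumes the vector and yields its elements by value.
/// let sum: i32 = vec.into_iter().sum();
/// assert_eq!(sum, 3);
/// ```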
1679pub struct IntoIter<T> {
1680    vec: SegmentedVec<T>,
1681    index: usize,
1682}
1683
1684impl<T> Iterator for IntoIter<T> {
1685    type Item = T;
1686
1687    fn next(&mut self) -> Option<Self::Item> {
1688        if self.index >= self.vec.len {
1689            return None;
1690        }
1691        let value = unsafe { self.vec.unchecked_read(self.index) };
1692        self.index += 1;
1693        Some(value)
1694    }
1695
1696    fn size_hint(&self) -> (usize, Option<usize>) {
1697        let remaining = self.vec.len - self.index;
1698        (remaining, Some(remaining))
1699    }
1700}
1701
1702impl<T> ExactSizeIterator for IntoIter<T> {}
1703
1704impl<T> Drop for IntoIter<T> {
1705    fn drop(&mut self) {
1706        // Drop remaining elements that weren't consumed
1707        for i in self.index..self.vec.len {
1708            unsafe {
1709                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1710            }
1711        }
        // Prevent the SegmentedVec from dropping these elements a second time
1713        self.vec.len = 0;
1714    }
1715}
1716
1717/// A draining iterator for `SegmentedVec`.
1718///
1719/// This struct is created by the `drain` method on `SegmentedVec`.
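///
/// A minimal sketch of the intended behaviour (assuming `drain` takes a
/// `Vec::drain`-style range, as the `range_start`/`range_end` fields suggest):
///
/// ```
/// use segmented_vec::SegmentedVec;
///
/// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
/// for i in 0..10 {
///     vec.push(i);
/// }
///
/// // Remove elements 2, 3 and 4; the tail shifts down to close the gap.
/// let drained: Vec<i32> = vec.drain(2..5).collect();
/// assert_eq!(drained, vec![2, 3, 4]);
/// assert_eq!(vec.len(), 7);
/// assert_eq!(vec[2], 5);
/// ```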
1720pub struct Drain<'a, T> {
1721    vec: &'a mut SegmentedVec<T>,
    /// Start of the drained range; surviving tail elements are moved here on drop
    range_start: usize,
    /// One past the last element still to be yielded; the surviving tail starts
    /// here (kept in sync by `next_back`)
    range_end: usize,
    /// Next element to yield from the front
    index: usize,
1725}
1726
1727impl<'a, T> Iterator for Drain<'a, T> {
1728    type Item = T;
1729
1730    fn next(&mut self) -> Option<Self::Item> {
1731        if self.index >= self.range_end {
1732            None
1733        } else {
1734            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1735            self.index += 1;
1736            Some(value)
1737        }
1738    }
1739
1740    fn size_hint(&self) -> (usize, Option<usize>) {
1741        let remaining = self.range_end - self.index;
1742        (remaining, Some(remaining))
1743    }
1744}
1745
impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.index >= self.range_end {
            return None;
        }
        self.range_end -= 1;
        let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) };
        // Close the hole immediately so `Drop` can keep assuming the surviving
        // tail starts at `self.range_end`: shift the tail left by one element.
        for i in (self.range_end + 1)..self.vec.len {
            unsafe {
                let src = self.vec.unchecked_at(i) as *const T;
                let dst = self.vec.unchecked_at_mut(i - 1) as *mut T;
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }
        self.vec.len -= 1;
        Some(value)
    }
}
1756
1757impl<'a, T> ExactSizeIterator for Drain<'a, T> {}
1758
impl<'a, T> Drop for Drain<'a, T> {
    fn drop(&mut self) {
        // Drop the elements of the drained range that were never yielded
        for i in self.index..self.range_end {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }

        // `range_end` marks where the surviving tail starts (see `next_back`).
        let kept_start = self.range_end;
        let original_len = self.vec.len;
        let drain_count = kept_start - self.range_start;
        if drain_count == 0 {
            // The gap (if any) was already closed by `next_back`; nothing left to do.
            return;
        }

        // Shift the elements after the drained range down to fill the gap.
        // Segments are not contiguous, so the elements are moved one at a time.
        for i in 0..(original_len - kept_start) {
            unsafe {
                let src = self.vec.unchecked_at(kept_start + i) as *const T;
                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }

        self.vec.len = original_len - drain_count;
    }
}
1784
1785#[cfg(test)]
1786mod tests {
1787    use super::*;
1788
1789    #[test]
1790    fn test_new_empty() {
1791        let vec: SegmentedVec<i32> = SegmentedVec::new();
1792        assert!(vec.is_empty());
1793        assert_eq!(vec.len(), 0);
1794    }
1795
1796    #[test]
1797    fn test_push_pop() {
1798        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1799        vec.push(1);
1800        vec.push(2);
1801        vec.push(3);
1802        assert_eq!(vec.len(), 3);
1803        assert_eq!(vec.pop(), Some(3));
1804        assert_eq!(vec.pop(), Some(2));
1805        assert_eq!(vec.pop(), Some(1));
1806        assert_eq!(vec.pop(), None);
1807    }
1808
1809    #[test]
1810    fn test_get() {
1811        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1812        vec.push(10);
1813        vec.push(20);
1814        vec.push(30);
1815        assert_eq!(vec.get(0), Some(&10));
1816        assert_eq!(vec.get(1), Some(&20));
1817        assert_eq!(vec.get(2), Some(&30));
1818        assert_eq!(vec.get(3), None);
1819    }
1820
1821    #[test]
1822    fn test_index() {
1823        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1824        vec.push(10);
1825        vec.push(20);
1826        assert_eq!(vec[0], 10);
1827        assert_eq!(vec[1], 20);
1828        vec[0] = 100;
1829        assert_eq!(vec[0], 100);
1830    }
1831
1832    #[test]
1833    fn test_stable_pointers() {
1834        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1835        vec.push(1);
1836        let ptr = &vec[0] as *const i32;
1837
1838        // Push many more elements to trigger segment allocations
1839        for i in 2..1000 {
1840            vec.push(i);
1841        }
1842
1843        // The pointer should still be valid
1844        assert_eq!(unsafe { *ptr }, 1);
1845    }
1846
1847    #[test]
1848    fn test_iter() {
1849        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1850        for i in 0..100 {
1851            vec.push(i);
1852        }
1853
1854        let collected: Vec<i32> = vec.iter().copied().collect();
1855        let expected: Vec<i32> = (0..100).collect();
1856        assert_eq!(collected, expected);
1857    }
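
    // A sketch of the `size_hint` contract for `Iter`: the bounds stay exact
    // as elements are consumed, matching the `ExactSizeIterator` impl above.
    #[test]
    fn test_iter_size_hint_exact() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        for i in 0..50 {
            vec.push(i);
        }

        let mut iter = vec.iter();
        assert_eq!(iter.size_hint(), (50, Some(50)));
        iter.next();
        iter.next();
        assert_eq!(iter.size_hint(), (48, Some(48)));
        assert_eq!(iter.len(), 48);
    }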
1858
1859    #[test]
1860    fn test_iter_mut() {
1861        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1862        for i in 0..10 {
1863            vec.push(i);
1864        }
1865
1866        for item in vec.iter_mut() {
1867            *item *= 2;
1868        }
1869
1870        let collected: Vec<i32> = vec.iter().copied().collect();
1871        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
1872        assert_eq!(collected, expected);
1873    }
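
    // A small sketch showing that the `IntoIterator` impls let a
    // `SegmentedVec` be used directly in `for` loops: by reference,
    // by mutable reference, and by value.
    #[test]
    fn test_for_loop_syntax() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..5);

        let mut sum = 0;
        for x in &vec {
            sum += *x;
        }
        assert_eq!(sum, 10);

        for x in &mut vec {
            *x += 1;
        }
        assert_eq!(vec[0], 1);

        let mut total = 0;
        for x in vec {
            total += x;
        }
        assert_eq!(total, 15);
    }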
1874
1875    #[test]
1876    fn test_into_iter() {
1877        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1878        for i in 0..10 {
1879            vec.push(i);
1880        }
1881
1882        let collected: Vec<i32> = vec.into_iter().collect();
1883        let expected: Vec<i32> = (0..10).collect();
1884        assert_eq!(collected, expected);
1885    }
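
    // A sketch of the `IntoIter` drop behaviour: elements that were never
    // yielded are still dropped exactly once when the iterator goes away.
    #[test]
    fn test_into_iter_drops_unconsumed() {
        use std::cell::Cell;
        use std::rc::Rc;

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        let drop_count = Rc::new(Cell::new(0));
        let mut vec: SegmentedVec<DropCounter> = SegmentedVec::new();
        for _ in 0..10 {
            vec.push(DropCounter {
                counter: Rc::clone(&drop_count),
            });
        }

        let mut iter = vec.into_iter();
        // Consume three elements; each is dropped as soon as it is discarded.
        for _ in 0..3 {
            drop(iter.next());
        }
        assert_eq!(drop_count.get(), 3);

        // Dropping the iterator drops the remaining seven exactly once.
        drop(iter);
        assert_eq!(drop_count.get(), 10);
    }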
1886
1887    #[test]
1888    fn test_from_iter() {
1889        let vec: SegmentedVec<i32> = (0..10).collect();
1890        assert_eq!(vec.len(), 10);
1891        for i in 0..10 {
1892            assert_eq!(vec[i], i as i32);
1893        }
1894    }
1895
1896    #[test]
1897    fn test_extend() {
1898        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1899        vec.extend(0..5);
1900        vec.extend(5..10);
1901        assert_eq!(vec.len(), 10);
1902        for i in 0..10 {
1903            assert_eq!(vec[i], i as i32);
1904        }
1905    }
1906
1907    #[test]
1908    fn test_clear() {
1909        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1910        vec.extend(0..10);
1911        vec.clear();
1912        assert!(vec.is_empty());
1913        assert_eq!(vec.len(), 0);
1914    }
1915
1916    #[test]
1917    fn test_truncate() {
1918        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1919        vec.extend(0..10);
1920        vec.truncate(5);
1921        assert_eq!(vec.len(), 5);
1922        for i in 0..5 {
1923            assert_eq!(vec[i], i as i32);
1924        }
1925    }
1926
1927    #[test]
1928    fn test_clone() {
1929        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1930        vec.extend(0..10);
1931        let vec2 = vec.clone();
1932        assert_eq!(vec, vec2);
1933    }
1934
1935    #[test]
1936    fn test_debug() {
1937        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1938        vec.extend(0..3);
1939        let debug_str = format!("{:?}", vec);
1940        assert_eq!(debug_str, "[0, 1, 2]");
1941    }
1942
1943    #[test]
1944    fn test_first_last() {
1945        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1946        assert_eq!(vec.first(), None);
1947        assert_eq!(vec.last(), None);
1948
1949        vec.push(1);
1950        assert_eq!(vec.first(), Some(&1));
1951        assert_eq!(vec.last(), Some(&1));
1952
1953        vec.push(2);
1954        vec.push(3);
1955        assert_eq!(vec.first(), Some(&1));
1956        assert_eq!(vec.last(), Some(&3));
1957    }
1958
1959    #[test]
1960    fn test_drop_elements() {
1961        use std::cell::Cell;
1962        use std::rc::Rc;
1963
1964        let drop_count = Rc::new(Cell::new(0));
1965
1966        struct DropCounter {
1967            counter: Rc<Cell<i32>>,
1968        }
1969
1970        impl Drop for DropCounter {
1971            fn drop(&mut self) {
1972                self.counter.set(self.counter.get() + 1);
1973            }
1974        }
1975
1976        {
1977            let mut vec: SegmentedVec<DropCounter> = SegmentedVec::new();
1978            for _ in 0..10 {
1979                vec.push(DropCounter {
1980                    counter: Rc::clone(&drop_count),
1981                });
1982            }
1983        }
1984
1985        assert_eq!(drop_count.get(), 10);
1986    }
1987
1988    #[test]
1989    fn test_shrink_to_fit() {
1990        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
1991        vec.extend(0..100);
1992        vec.truncate(5);
1993        vec.shrink_to_fit();
1994        assert_eq!(vec.len(), 5);
1995        for i in 0..5 {
1996            assert_eq!(vec[i], i as i32);
1997        }
1998    }
1999
2000    #[test]
2001    fn test_sort() {
2002        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2003        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2004        vec.sort();
2005        let result: Vec<i32> = vec.iter().copied().collect();
2006        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2007    }
2008
2009    #[test]
2010    fn test_sort_empty() {
2011        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2012        vec.sort();
2013        assert!(vec.is_empty());
2014    }
2015
2016    #[test]
2017    fn test_sort_single() {
2018        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2019        vec.push(42);
2020        vec.sort();
2021        assert_eq!(vec[0], 42);
2022    }
2023
2024    #[test]
2025    fn test_sort_already_sorted() {
2026        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2027        vec.extend(0..10);
2028        vec.sort();
2029        let result: Vec<i32> = vec.iter().copied().collect();
2030        assert_eq!(result, (0..10).collect::<Vec<_>>());
2031    }
2032
2033    #[test]
2034    fn test_sort_reverse_sorted() {
2035        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2036        vec.extend((0..10).rev());
2037        vec.sort();
2038        let result: Vec<i32> = vec.iter().copied().collect();
2039        assert_eq!(result, (0..10).collect::<Vec<_>>());
2040    }
2041
2042    #[test]
2043    fn test_sort_by() {
2044        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2045        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2046        vec.sort_by(|a, b| b.cmp(a)); // reverse order
2047        let result: Vec<i32> = vec.iter().copied().collect();
2048        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
2049    }
2050
2051    #[test]
2052    fn test_sort_by_key() {
2053        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2054        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
2055        vec.sort_by_key(|k| k.abs());
2056        let result: Vec<i32> = vec.iter().copied().collect();
2057        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
2058    }
2059
2060    #[test]
2061    fn test_sort_stable() {
2062        // Test that sort is stable (equal elements maintain their relative order)
2063        #[derive(Debug, Clone, Eq, PartialEq)]
2064        struct Item {
2065            key: i32,
2066            order: usize,
2067        }
2068
2069        let mut vec: SegmentedVec<Item> = SegmentedVec::new();
2070        vec.push(Item { key: 1, order: 0 });
2071        vec.push(Item { key: 2, order: 1 });
2072        vec.push(Item { key: 1, order: 2 });
2073        vec.push(Item { key: 2, order: 3 });
2074        vec.push(Item { key: 1, order: 4 });
2075
2076        vec.sort_by_key(|item| item.key);
2077
2078        // All items with key=1 should come first, in original order
2079        assert_eq!(vec[0], Item { key: 1, order: 0 });
2080        assert_eq!(vec[1], Item { key: 1, order: 2 });
2081        assert_eq!(vec[2], Item { key: 1, order: 4 });
2082        // Then items with key=2, in original order
2083        assert_eq!(vec[3], Item { key: 2, order: 1 });
2084        assert_eq!(vec[4], Item { key: 2, order: 3 });
2085    }
2086
2087    #[test]
2088    fn test_sort_unstable() {
2089        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2090        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2091        vec.sort_unstable();
2092        let result: Vec<i32> = vec.iter().copied().collect();
2093        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2094    }
2095
2096    #[test]
2097    fn test_sort_unstable_by() {
2098        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2099        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2100        vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
2101        let result: Vec<i32> = vec.iter().copied().collect();
2102        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
2103    }
2104
2105    #[test]
2106    fn test_sort_unstable_by_key() {
2107        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2108        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
2109        vec.sort_unstable_by_key(|k| k.abs());
2110        // Just verify it's sorted by absolute value (unstable may reorder equal elements)
2111        let result: Vec<i32> = vec.iter().copied().collect();
2112        for i in 1..result.len() {
2113            assert!(result[i - 1].abs() <= result[i].abs());
2114        }
2115    }
2116
2117    #[test]
2118    fn test_sort_large() {
2119        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2120        // Add numbers in reverse order
2121        vec.extend((0..1000).rev());
2122        vec.sort();
2123        for i in 0..1000 {
2124            assert_eq!(vec[i], i as i32);
2125        }
2126    }
2127
2128    #[test]
2129    fn test_sort_unstable_large() {
2130        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2131        // Add numbers in reverse order
2132        vec.extend((0..1000).rev());
2133        vec.sort_unstable();
2134        for i in 0..1000 {
2135            assert_eq!(vec[i], i as i32);
2136        }
2137    }
2138
2139    #[test]
2140    fn test_sort_pointers_remain_stable_after_sort() {
2141        // Verify that after sorting, pointers to elements are still valid
2142        // (they point to different values, but the memory locations are stable)
2143        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2144        vec.extend([5, 2, 8, 1, 9]);
2145
2146        // Get pointer to first element before sort
2147        let ptr = &vec[0] as *const i32;
2148
2149        vec.sort();
2150
2151        // After sort, the pointer still points to valid memory (now contains sorted value)
2152        assert_eq!(unsafe { *ptr }, 1); // First element after sort is 1
2153    }
2154
2155    // --- Slice tests ---
2156
2157    #[test]
2158    fn test_as_slice() {
2159        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2160        vec.extend(0..10);
2161
2162        let slice = vec.as_slice();
2163        assert_eq!(slice.len(), 10);
2164        assert_eq!(slice[0], 0);
2165        assert_eq!(slice[9], 9);
2166    }
2167
2168    #[test]
2169    fn test_as_mut_slice() {
2170        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2171        vec.extend(0..10);
2172
2173        {
2174            let mut slice = vec.as_mut_slice();
2175            slice[0] = 100;
2176            slice[9] = 200;
2177        }
2178
2179        assert_eq!(vec[0], 100);
2180        assert_eq!(vec[9], 200);
2181    }
2182
2183    #[test]
2184    fn test_slice_range() {
2185        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2186        vec.extend(0..10);
2187
2188        let slice = vec.slice(2..5);
2189        assert_eq!(slice.len(), 3);
2190        assert_eq!(slice[0], 2);
2191        assert_eq!(slice[1], 3);
2192        assert_eq!(slice[2], 4);
2193    }
2194
2195    #[test]
2196    fn test_slice_mut_range() {
2197        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2198        vec.extend(0..10);
2199
2200        {
2201            let mut slice = vec.slice_mut(2..5);
2202            slice[0] = 100;
2203            slice[2] = 200;
2204        }
2205
2206        assert_eq!(vec[2], 100);
2207        assert_eq!(vec[4], 200);
2208    }
2209
2210    #[test]
2211    fn test_slice_first_last() {
2212        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2213        vec.extend(0..10);
2214
2215        let slice = vec.as_slice();
2216        assert_eq!(slice.first(), Some(&0));
2217        assert_eq!(slice.last(), Some(&9));
2218    }
2219
2220    #[test]
2221    fn test_slice_iter() {
2222        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2223        vec.extend(0..10);
2224
2225        let slice = vec.as_slice();
2226        let collected: Vec<i32> = slice.iter().copied().collect();
2227        assert_eq!(collected, (0..10).collect::<Vec<_>>());
2228    }
2229
2230    #[test]
2231    fn test_slice_iter_rev() {
2232        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2233        vec.extend(0..10);
2234
2235        let slice = vec.as_slice();
2236        let collected: Vec<i32> = slice.iter().rev().copied().collect();
2237        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
2238    }
2239
2240    #[test]
2241    fn test_slice_contains() {
2242        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2243        vec.extend(0..10);
2244
2245        let slice = vec.as_slice();
2246        assert!(slice.contains(&5));
2247        assert!(!slice.contains(&100));
2248    }
2249
2250    #[test]
2251    fn test_slice_binary_search() {
2252        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2253        vec.extend(0..100);
2254
2255        let slice = vec.as_slice();
2256        assert_eq!(slice.binary_search(&50), Ok(50));
2257        assert_eq!(slice.binary_search(&0), Ok(0));
2258        assert_eq!(slice.binary_search(&99), Ok(99));
2259        assert_eq!(slice.binary_search(&100), Err(100));
2260    }
2261
2262    #[test]
2263    fn test_slice_split_at() {
2264        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2265        vec.extend(0..10);
2266
2267        let slice = vec.as_slice();
2268        let (left, right) = slice.split_at(5);
2269        assert_eq!(left.len(), 5);
2270        assert_eq!(right.len(), 5);
2271        assert_eq!(left[0], 0);
2272        assert_eq!(right[0], 5);
2273    }
2274
2275    #[test]
2276    fn test_slice_to_vec() {
2277        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2278        vec.extend(0..10);
2279
2280        let slice = vec.as_slice();
2281        let converted = slice.to_vec();
2282        assert_eq!(converted, (0..10).collect::<Vec<_>>());
2283    }
2284
2285    #[test]
2286    fn test_slice_mut_sort() {
2287        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2288        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);
2289
2290        // Sort only the middle part
2291        {
2292            let mut slice = vec.slice_mut(2..8);
2293            slice.sort();
2294        }
2295
2296        let result: Vec<i32> = vec.iter().copied().collect();
2297        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
2298    }
2299
2300    #[test]
2301    fn test_slice_mut_reverse() {
2302        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2303        vec.extend(0..10);
2304
2305        {
2306            let mut slice = vec.slice_mut(2..8);
2307            slice.reverse();
2308        }
2309
2310        let result: Vec<i32> = vec.iter().copied().collect();
2311        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
2312    }
2313
2314    #[test]
2315    fn test_slice_mut_fill() {
2316        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2317        vec.extend(0..10);
2318
2319        {
2320            let mut slice = vec.slice_mut(2..5);
2321            slice.fill(99);
2322        }
2323
2324        let result: Vec<i32> = vec.iter().copied().collect();
2325        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
2326    }
2327
2328    #[test]
2329    fn test_slice_starts_with() {
2330        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2331        vec.extend(0..10);
2332
2333        let slice = vec.as_slice();
2334        assert!(slice.starts_with(&[0, 1, 2]));
2335        assert!(!slice.starts_with(&[1, 2, 3]));
2336    }
2337
2338    #[test]
2339    fn test_slice_ends_with() {
2340        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2341        vec.extend(0..10);
2342
2343        let slice = vec.as_slice();
2344        assert!(slice.ends_with(&[7, 8, 9]));
2345        assert!(!slice.ends_with(&[6, 7, 8]));
2346    }
2347
2348    #[test]
2349    fn test_slice_eq() {
2350        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2351        vec.extend(0..10);
2352
2353        let slice = vec.as_slice();
2354        assert_eq!(slice, (0..10).collect::<Vec<_>>());
2355    }
2356
2357    #[test]
2358    fn test_min_non_zero_cap() {
2359        // For u8 (1 byte), first segment should be 8 elements
2360        let mut vec_u8: SegmentedVec<u8> = SegmentedVec::new();
2361        vec_u8.push(1);
2362        assert_eq!(vec_u8.capacity(), 8);
2363
2364        // For i32 (4 bytes, <= 1024), first segment should be 4 elements
2365        let mut vec_i32: SegmentedVec<i32> = SegmentedVec::new();
2366        vec_i32.push(1);
2367        assert_eq!(vec_i32.capacity(), 4);
2368
2369        // Verify indexing still works correctly with the larger first segment
2370        for i in 0u8..100 {
2371            vec_u8.push(i);
2372        }
2373        for i in 0..101 {
2374            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
2375        }
2376    }
2377
2378    #[test]
2379    fn test_extend_from_copy_slice() {
2380        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2381        let data: Vec<i32> = (0..100).collect();
2382        vec.extend_from_copy_slice(&data);
2383        assert_eq!(vec.len(), 100);
2384        for i in 0..100 {
2385            assert_eq!(vec[i], i as i32);
2386        }
2387
2388        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2389        vec.push(999);
2390        vec.push(998);
2391        vec.extend_from_copy_slice(&data[..10]);
2392        assert_eq!(vec.len(), 12);
2393        assert_eq!(vec[0], 999);
2394        assert_eq!(vec[1], 998);
2395        for i in 0..10 {
2396            assert_eq!(vec[i + 2], i as i32);
2397        }
2398
2399        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2400        vec.extend_from_copy_slice(&[]);
2401        assert!(vec.is_empty());
2402
2403        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
2404        vec.extend_from_copy_slice(&[1, 2, 3]); // 3 elements, not at boundary
2405        assert_eq!(vec.len(), 3);
2406        vec.push(4); // Should use fast path, not crash in push_slow
2407        vec.push(5);
2408        vec.push(6);
2409        assert_eq!(vec.len(), 6);
2410        for i in 0..6 {
2411            assert_eq!(vec[i], (i + 1) as i32);
2412        }
2413    }
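
    // A sketch of `drain` behaviour, assuming it takes a `Vec::drain`-style
    // range (as the `Drain` struct's `range_start`/`range_end` fields suggest):
    // the drained elements are yielded and the surviving tail closes the gap.
    #[test]
    fn test_drain() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let drained: Vec<i32> = vec.drain(2..5).collect();
        assert_eq!(drained, vec![2, 3, 4]);

        let remaining: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(remaining, vec![0, 1, 5, 6, 7, 8, 9]);
        assert_eq!(vec.len(), 7);
    }

    // Same assumption as above, consuming the drained range from the back
    // via `DoubleEndedIterator`.
    #[test]
    fn test_drain_rev() {
        let mut vec: SegmentedVec<i32> = SegmentedVec::new();
        vec.extend(0..10);

        let drained: Vec<i32> = vec.drain(2..5).rev().collect();
        assert_eq!(drained, vec![4, 3, 2]);

        let remaining: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(remaining, vec![0, 1, 5, 6, 7, 8, 9]);
    }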
2414}