// segmented_vec/lib.rs

1//! A segmented growable vector with stable pointers.
2//!
3//! Unlike `Vec`, pushing new elements never invalidates pointers to existing elements.
4//! This is achieved by storing elements in segments of exponentially growing sizes.
5//!
6//! # Example
7//!
8//! ```
9//! use segmented_vec::SegmentedVec;
10//!
11//! let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
12//! vec.push(1);
13//! vec.push(2);
14//!
15//! // Get a pointer to the first element
16//! let ptr = &vec[0] as *const i32;
17//!
18//! // Push more elements - the pointer remains valid!
19//! for i in 3..100 {
20//!     vec.push(i);
21//! }
22//!
23//! // The pointer is still valid
24//! assert_eq!(unsafe { *ptr }, 1);
25//! ```
26
27mod slice;
28mod sort;
29
30pub use slice::{
31    Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32    Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::mem::MaybeUninit;
39use std::ops::{Index, IndexMut};
40use std::ptr::NonNull;
41
/// A segmented vector with stable pointers.
///
/// `PREALLOC` specifies the number of elements to store inline (must be 0 or a power of 2).
/// Elements beyond the preallocation are stored in dynamically allocated segments.
pub struct SegmentedVec<T, const PREALLOC: usize = 0> {
    // Inline storage for the first PREALLOC elements; only the first
    // `len.min(PREALLOC)` slots are initialized.
    prealloc_segment: MaybeUninit<[T; PREALLOC]>,
    // Heap-allocated segments ("shelves"); sizes grow exponentially, so
    // existing segments are never reallocated and element addresses are stable.
    dynamic_segments: Vec<NonNull<T>>,
    // Number of initialized elements across all segments.
    len: usize,
    /// Cached pointer to the next write position (for fast push)
    write_ptr: *mut T,
    /// Pointer to the end of the current segment
    segment_end: *mut T,
    /// Pointer to the base of the current segment
    segment_base: *mut T,
    // Declares ownership of `T` values (held behind raw pointers) for the
    // drop checker.
    _marker: PhantomData<T>,
}

// Safety: SegmentedVec owns its `T`s (inline and behind the raw segment
// pointers), so moving it to another thread is sound whenever `T: Send`.
unsafe impl<T: Send, const PREALLOC: usize> Send for SegmentedVec<T, PREALLOC> {}
// Safety: a shared `&SegmentedVec` only hands out `&T`, so sharing it across
// threads is sound whenever `T: Sync`.
unsafe impl<T: Sync, const PREALLOC: usize> Sync for SegmentedVec<T, PREALLOC> {}
63
64impl<T, const PREALLOC: usize> SegmentedVec<T, PREALLOC> {
    /// log2(PREALLOC), or 0 when there is no inline storage.
    ///
    /// The `assert!` makes a non-power-of-two `PREALLOC` a compile-time error
    /// as soon as this constant is evaluated.
    const PREALLOC_EXP: u32 = if PREALLOC == 0 {
        0
    } else {
        assert!(
            PREALLOC.is_power_of_two(),
            "PREALLOC must be 0 or a power of 2"
        );
        PREALLOC.trailing_zeros()
    };

    /// Minimum capacity for the first dynamic segment when PREALLOC=0.
    /// Avoids tiny allocations that heap allocators round up anyway.
    /// - 8 for 1-byte elements (allocators round up small requests)
    /// - 4 for moderate elements (<= 1 KiB)
    /// - 1 for large elements (avoid wasting space)
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    /// log2 of `MIN_NON_ZERO_CAP` (all values above are powers of two, so
    /// `trailing_zeros` is exact).
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();

    /// Bias added to an element index so that segment boundaries land on
    /// powers of two: the first segment holds exactly `BIAS` elements.
    const BIAS: usize = if PREALLOC > 0 {
        PREALLOC
    } else {
        Self::MIN_NON_ZERO_CAP
    };

    /// Offset to subtract from MSB to get shelf index.
    const SHELF_OFFSET: u32 = if PREALLOC == 0 {
        Self::MIN_CAP_EXP
    } else {
        Self::PREALLOC_EXP + 1
    };
105
    /// Creates a new empty `SegmentedVec`.
    ///
    /// Performs no allocation; the write-pointer cache stays null until the
    /// first push (or reserve) initializes it.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    /// let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// assert!(vec.is_empty());
    /// ```
    #[inline]
    pub const fn new() -> Self {
        Self {
            prealloc_segment: MaybeUninit::uninit(),
            dynamic_segments: Vec::new(),
            len: 0,
            // Null cache => both push and pop take their slow paths, which
            // lazily set these pointers up.
            write_ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            segment_base: std::ptr::null_mut(),
            _marker: PhantomData,
        }
    }
127
    /// Initialize or update the write pointer cache.
    ///
    /// On return, `write_ptr` addresses the slot for index `self.len` (the
    /// next write position) and `segment_base`/`segment_end` bracket the
    /// segment containing that slot; all three are nulled when the vector has
    /// no storage at all.
    ///
    /// NOTE(review): assumes `location(len)` maps an element index to
    /// `(shelf, box_index)` and that the shelf for index `len` is already
    /// allocated whenever the dynamic branch is taken (i.e. `len < capacity`)
    /// — confirm against `grow_capacity`/`location`.
    #[inline]
    fn init_write_ptr(&mut self) {
        // Next write lands inside the inline prealloc segment.
        if PREALLOC > 0 && self.len < PREALLOC {
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_base = base;
            self.write_ptr = unsafe { base.add(self.len) };
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else if !self.dynamic_segments.is_empty() {
            // Next write lands in a dynamic shelf.
            let (shelf, box_idx) = Self::location(self.len);
            let shelf_size = Self::shelf_size(shelf as u32);
            let base = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
            self.segment_base = base;
            self.write_ptr = unsafe { base.add(box_idx) };
            self.segment_end = unsafe { base.add(shelf_size) };
        } else if PREALLOC > 0 {
            // No dynamic shelves yet: point at the start of the prealloc
            // segment (reached e.g. when len == 0).
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_base = base;
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            // No storage at all: null cache forces slow paths.
            self.segment_base = std::ptr::null_mut();
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
154
    /// Returns the number of elements in the vector.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector contains no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Returns the current capacity of the vector.
    ///
    /// Derived from the number of allocated shelves; pushing beyond it
    /// allocates a new shelf without moving existing elements.
    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.dynamic_segments.len() as u32)
    }
172
    /// Appends an element to the back of the vector.
    ///
    /// Never moves existing elements, so previously obtained pointers and
    /// references to elements stay valid.
    ///
    /// # Panics
    ///
    /// Panics if allocation fails.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.push(1);
    /// vec.push(2);
    /// assert_eq!(vec.len(), 2);
    /// ```
    #[inline]
    pub fn push(&mut self, value: T) {
        // Fast path: the cached segment still has a free slot
        // (write_ptr points at the slot for index `len`).
        if self.write_ptr < self.segment_end {
            unsafe {
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.len += 1;
            return;
        }

        // Slow path: cache is null or the segment is full; grow or advance
        // to the next segment.
        self.push_slow(value);
    }
203
    #[cold]
    #[inline(never)]
    // Push fallback taken at segment boundaries (or when the cache is null):
    // writes `value` into the first slot of the next segment, allocating it
    // if necessary, and re-primes the write-pointer cache.
    fn push_slow(&mut self, value: T) {
        let new_len = self.len + 1;

        // Special case: first push into prealloc segment
        if PREALLOC > 0 && self.len == 0 {
            unsafe {
                let base = (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr();
                std::ptr::write(base, value);
                self.segment_base = base;
                self.write_ptr = base.add(1);
                self.segment_end = base.add(PREALLOC);
            }
            self.len = 1;
            return;
        }

        // We're at a segment boundary (box_index = 0), so biased = len + BIAS is a power of 2
        // Calculate shelf directly without full location()
        let biased = self.len + Self::BIAS;
        let shelf = (biased.trailing_zeros() - Self::SHELF_OFFSET) as usize;
        let shelf_size = Self::shelf_size(shelf as u32);

        // Ensure the target shelf is allocated before we write into it.
        self.grow_capacity(new_len);

        unsafe {
            let base = self.dynamic_segments.get_unchecked(shelf).as_ptr();
            std::ptr::write(base, value);
            self.segment_base = base;
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len = new_len;
    }
239
    /// Removes the last element from the vector and returns it, or `None` if empty.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.push(1);
    /// vec.push(2);
    /// assert_eq!(vec.pop(), Some(2));
    /// assert_eq!(vec.pop(), Some(1));
    /// assert_eq!(vec.pop(), None);
    /// ```
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        // Fast path: the cached segment still holds at least one element
        // (write_ptr sits strictly past the segment base, one past the last
        // element).
        if self.write_ptr > self.segment_base {
            unsafe {
                let new_len = self.len - 1;
                self.len = new_len;
                // Move back to the last written element
                self.write_ptr = self.write_ptr.sub(1);
                // Return the value
                Some(std::ptr::read(self.write_ptr))
            }
        } else {
            // Slow path: the last element lives in an earlier segment (or the
            // vector is empty / the cache is stale); cross the boundary.
            self.pop_slow_path()
        }
    }
270
271    #[cold]
272    #[inline(never)]
273    fn pop_slow_path(&mut self) -> Option<T> {
274        if self.len == 0 {
275            return None;
276        }
277
278        unsafe {
279            // Because write_ptr == segment_base, the element we want
280            // is actually at the very first slot of this segment.
281            let val = std::ptr::read(self.write_ptr);
282
283            let new_len = self.len - 1;
284            self.len = new_len;
285            self.init_write_ptr();
286
287            Some(val)
288        }
289    }
290
291    /// Returns a reference to the element at the given index.
292    ///
293    /// Returns `None` if the index is out of bounds.
294    #[inline]
295    pub fn get(&self, index: usize) -> Option<&T> {
296        if index < self.len {
297            Some(unsafe { self.unchecked_at(index) })
298        } else {
299            None
300        }
301    }
302
303    /// Returns a mutable reference to the element at the given index.
304    ///
305    /// Returns `None` if the index is out of bounds.
306    #[inline]
307    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
308        if index < self.len {
309            Some(unsafe { self.unchecked_at_mut(index) })
310        } else {
311            None
312        }
313    }
314
315    /// Returns a reference to the first element, or `None` if empty.
316    #[inline]
317    pub fn first(&self) -> Option<&T> {
318        self.get(0)
319    }
320
321    /// Returns a mutable reference to the first element, or `None` if empty.
322    #[inline]
323    pub fn first_mut(&mut self) -> Option<&mut T> {
324        self.get_mut(0)
325    }
326
327    /// Returns a reference to the last element, or `None` if empty.
328    #[inline]
329    pub fn last(&self) -> Option<&T> {
330        if self.len == 0 {
331            None
332        } else {
333            self.get(self.len - 1)
334        }
335    }
336
337    /// Returns a mutable reference to the last element, or `None` if empty.
338    #[inline]
339    pub fn last_mut(&mut self) -> Option<&mut T> {
340        if self.len == 0 {
341            None
342        } else {
343            self.get_mut(self.len - 1)
344        }
345    }
346
    /// Returns `true` if the slice contains an element with the given value.
    ///
    /// Linear scan; delegates to the segmented slice view.
    #[inline]
    pub fn contains(&self, x: &T) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().contains(x)
    }

    /// Returns `true` if `needle` is a prefix of the vector.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    /// Returns `true` if `needle` is a suffix of the vector.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }

    /// Binary searches this vector for a given element.
    ///
    /// The vector must be sorted for the result to be meaningful.
    ///
    /// If the value is found, returns `Ok(index)`. If not found, returns
    /// `Err(index)` where `index` is the position where the element could be inserted.
    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
    where
        T: Ord,
    {
        self.as_slice().binary_search(x)
    }

    /// Binary searches this vector with a comparator function.
    ///
    /// See [`SegmentedVec::binary_search`] for the return-value convention.
    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> Ordering,
    {
        self.as_slice().binary_search_by(f)
    }

    /// Binary searches this vector with a key extraction function.
    ///
    /// See [`SegmentedVec::binary_search`] for the return-value convention.
    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
    where
        F: FnMut(&T) -> B,
        B: Ord,
    {
        self.as_slice().binary_search_by_key(b, f)
    }
399
    /// Swaps two elements in the vector.
    ///
    /// Delegates to the mutable slice view.
    ///
    /// # Panics
    ///
    /// Panics if `a` or `b` are out of bounds.
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    /// Reverses the order of elements in the vector.
    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    /// Fills the vector with the given value.
    ///
    /// Overwrites existing elements; the length is unchanged.
    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    /// Fills the vector with values produced by a function.
    ///
    /// Overwrites existing elements; the length is unchanged.
    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }
430
431    /// Clears the vector, removing all elements.
432    ///
433    /// This does not deallocate the dynamic segments.
434    pub fn clear(&mut self) {
435        if self.len == 0 {
436            return;
437        }
438
439        if std::mem::needs_drop::<T>() {
440            // Drop elements in prealloc segment
441            if PREALLOC > 0 {
442                let count = self.len.min(PREALLOC);
443                let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
444                unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
445            }
446
447            // Drop elements in dynamic segments
448            if self.len > PREALLOC {
449                let mut remaining = self.len - PREALLOC;
450                for shelf in 0..self.dynamic_segments.len() {
451                    let size = Self::shelf_size(shelf as u32);
452                    let count = remaining.min(size);
453                    let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
454                    unsafe {
455                        std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
456                    };
457                    remaining -= count;
458                    if remaining == 0 {
459                        break;
460                    }
461                }
462            }
463        }
464
465        self.len = 0;
466        // Reset write pointer cache
467        if PREALLOC > 0 {
468            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
469            self.write_ptr = base;
470            self.segment_end = unsafe { base.add(PREALLOC) };
471        } else {
472            self.write_ptr = std::ptr::null_mut();
473            self.segment_end = std::ptr::null_mut();
474        }
475    }
476
477    /// Shortens the vector, keeping the first `new_len` elements.
478    ///
479    /// If `new_len` is greater than or equal to the current length, this has no effect.
480    pub fn truncate(&mut self, new_len: usize) {
481        if new_len >= self.len {
482            return;
483        }
484
485        if std::mem::needs_drop::<T>() {
486            // Drop elements in prealloc segment (from new_len to min(self.len, PREALLOC))
487            if PREALLOC > 0 && new_len < PREALLOC {
488                let start = new_len;
489                let end = self.len.min(PREALLOC);
490                if start < end {
491                    let ptr = unsafe {
492                        (*self.prealloc_segment.as_mut_ptr())
493                            .as_mut_ptr()
494                            .add(start)
495                    };
496                    unsafe {
497                        std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(
498                            ptr,
499                            end - start,
500                        ))
501                    };
502                }
503            }
504
505            // Drop elements in dynamic segments
506            if self.len > PREALLOC {
507                let dynamic_new_len = new_len.saturating_sub(PREALLOC);
508                let dynamic_old_len = self.len - PREALLOC;
509
510                if dynamic_new_len < dynamic_old_len {
511                    let mut pos = 0usize;
512                    for shelf in 0..self.dynamic_segments.len() {
513                        let size = Self::shelf_size(shelf as u32);
514                        let seg_end = pos + size;
515
516                        // Calculate overlap with [dynamic_new_len, dynamic_old_len)
517                        let drop_start = dynamic_new_len.max(pos);
518                        let drop_end = dynamic_old_len.min(seg_end);
519
520                        if drop_start < drop_end {
521                            let offset = drop_start - pos;
522                            let count = drop_end - drop_start;
523                            let ptr = unsafe {
524                                self.dynamic_segments
525                                    .get_unchecked(shelf)
526                                    .as_ptr()
527                                    .add(offset)
528                            };
529                            unsafe {
530                                std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(
531                                    ptr, count,
532                                ))
533                            };
534                        }
535
536                        pos = seg_end;
537                        if pos >= dynamic_old_len {
538                            break;
539                        }
540                    }
541                }
542            }
543        }
544
545        self.len = new_len;
546        // Update write pointer cache
547        if new_len > 0 {
548            self.init_write_ptr();
549        } else if PREALLOC > 0 {
550            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
551            self.write_ptr = base;
552            self.segment_end = unsafe { base.add(PREALLOC) };
553        } else {
554            self.write_ptr = std::ptr::null_mut();
555            self.segment_end = std::ptr::null_mut();
556        }
557    }
558
559    /// Reserves capacity for at least `additional` more elements.
560    pub fn reserve(&mut self, additional: usize) {
561        let old_capacity = self.capacity();
562        self.grow_capacity(self.len + additional);
563        // Initialize write pointer if we didn't have capacity before
564        if old_capacity == 0 && self.capacity() > 0 && self.segment_end.is_null() {
565            self.init_write_ptr();
566        }
567    }
568
    /// Shrinks the capacity to match the current length.
    ///
    /// This deallocates unused dynamic segments. Pointers to the remaining
    /// elements stay valid, since only empty shelves are released.
    pub fn shrink_to_fit(&mut self) {
        self.shrink_capacity(self.len);
    }
575
    /// Returns an iterator over references to the elements.
    ///
    /// The iterator walks segments in order, yielding elements front to back.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T, PREALLOC> {
        // Initialize with null pointers - first next() call will set up the segment
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Returns an iterator over mutable references to the elements.
    ///
    /// The iterator walks segments in order, yielding elements front to back.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T, PREALLOC> {
        // Initialize with null pointers - first next() call will set up the segment
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }
601
    /// Returns an immutable slice view of the entire vector.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let slice = vec.as_slice();
    /// assert_eq!(slice.len(), 10);
    /// assert_eq!(slice[0], 0);
    /// ```
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T, PREALLOC> {
        SegmentedSlice::new(self)
    }

    /// Returns a mutable slice view of the entire vector.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let mut slice = vec.as_mut_slice();
    /// slice[0] = 100;
    /// assert_eq!(vec[0], 100);
    /// ```
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T, PREALLOC> {
        SegmentedSliceMut::new(self)
    }
639
    /// Returns an immutable slice view of a range of the vector.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let slice = vec.slice(2..5);
    /// assert_eq!(slice.len(), 3);
    /// assert_eq!(slice[0], 2);
    /// ```
    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T, PREALLOC> {
        // Bounds check up front so the view can assume a valid range.
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    /// Returns a mutable slice view of a range of the vector.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let mut slice = vec.slice_mut(2..5);
    /// slice[0] = 100;
    /// assert_eq!(vec[2], 100);
    /// ```
    #[inline]
    pub fn slice_mut(
        &mut self,
        range: std::ops::Range<usize>,
    ) -> SegmentedSliceMut<'_, T, PREALLOC> {
        // Bounds check up front so the view can assume a valid range.
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }
690
691    /// Extends the vector by cloning elements from a slice.
692    pub fn extend_from_slice(&mut self, other: &[T])
693    where
694        T: Clone,
695    {
696        if other.is_empty() {
697            return;
698        }
699        self.reserve(other.len());
700
701        let mut src = other;
702
703        // Fill prealloc segment if there's room
704        if PREALLOC > 0 && self.len < PREALLOC {
705            let prealloc_remaining = PREALLOC - self.len;
706            let to_copy = src.len().min(prealloc_remaining);
707            let dest = unsafe {
708                (*self.prealloc_segment.as_mut_ptr())
709                    .as_mut_ptr()
710                    .add(self.len)
711            };
712            for (i, item) in src.iter().take(to_copy).enumerate() {
713                unsafe { std::ptr::write(dest.add(i), item.clone()) };
714            }
715            self.len += to_copy;
716            src = &src[to_copy..];
717        }
718
719        // Fill dynamic segments
720        while !src.is_empty() {
721            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
722            let to_copy = src.len().min(remaining);
723            let dest = unsafe {
724                self.dynamic_segments
725                    .get_unchecked(shelf)
726                    .as_ptr()
727                    .add(box_idx)
728            };
729            for (i, item) in src.iter().take(to_copy).enumerate() {
730                unsafe { std::ptr::write(dest.add(i), item.clone()) };
731            }
732            self.len += to_copy;
733            src = &src[to_copy..];
734        }
735
736        // Set up write pointer cache for next push
737        if self.len < self.capacity() {
738            self.init_write_ptr();
739        } else {
740            self.write_ptr = std::ptr::null_mut();
741            self.segment_end = std::ptr::null_mut();
742        }
743    }
744
745    /// Extends the vector by copying elements from a slice (for `Copy` types).
746    ///
747    /// This is more efficient than `extend_from_slice` for `Copy` types
748    /// as it uses bulk memory copy operations.
749    pub fn extend_from_copy_slice(&mut self, other: &[T])
750    where
751        T: Copy,
752    {
753        if other.is_empty() {
754            return;
755        }
756        self.reserve(other.len());
757
758        let mut src = other;
759
760        // Fill prealloc segment if there's room
761        if PREALLOC > 0 && self.len < PREALLOC {
762            let prealloc_remaining = PREALLOC - self.len;
763            let to_copy = src.len().min(prealloc_remaining);
764            unsafe {
765                let dest = (*self.prealloc_segment.as_mut_ptr())
766                    .as_mut_ptr()
767                    .add(self.len);
768                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
769            };
770            self.len += to_copy;
771            src = &src[to_copy..];
772        }
773
774        // Fill dynamic segments
775        while !src.is_empty() {
776            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
777            let to_copy = src.len().min(remaining);
778            unsafe {
779                let dest = self
780                    .dynamic_segments
781                    .get_unchecked(shelf)
782                    .as_ptr()
783                    .add(box_idx);
784                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
785            };
786            self.len += to_copy;
787            src = &src[to_copy..];
788        }
789
790        // Set up write pointer cache for next push
791        if self.len < self.capacity() {
792            self.init_write_ptr();
793        } else {
794            self.write_ptr = std::ptr::null_mut();
795            self.segment_end = std::ptr::null_mut();
796        }
797    }
798
    /// Sorts the vector in place using a stable sorting algorithm.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
    ///
    /// The algorithm is a merge sort adapted from the Rust standard library.
    /// Delegates to the mutable slice view.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort();
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
    /// ```
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    /// Sorts the vector in place with a comparator function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
    ///
    /// The comparator function must define a total ordering for the elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort_by(|a, b| b.cmp(a)); // reverse order
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
    /// ```
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    /// Sorts the vector in place with a key extraction function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and O(m * n * log(n)) worst-case,
    /// where the key function is O(m).
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
    /// vec.sort_by_key(|k| k.abs());
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, -3, -4, 5, 6, -9]);
    /// ```
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }
869
    /// Sorts the vector in place using an unstable sorting algorithm.
    ///
    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
    ///
    /// The algorithm is a quicksort with heapsort fallback, adapted from the Rust standard library.
    /// Delegates to the mutable slice view.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort_unstable();
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
    /// ```
    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    /// Sorts the vector in place with a comparator function using an unstable sorting algorithm.
    ///
    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
    ///
    /// The comparator function must define a total ordering for the elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
    /// ```
    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    /// Sorts the vector in place with a key extraction function using an unstable sorting algorithm.
    ///
    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case,
    /// where the key function is O(m).
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
    /// vec.sort_unstable_by_key(|k| k.abs());
    /// // Note: unstable sort may reorder equal elements, so we just check it's sorted by abs value
    /// let result: Vec<i32> = vec.iter().copied().collect();
    /// for i in 1..result.len() {
    ///     assert!(result[i-1].abs() <= result[i].abs());
    /// }
    /// ```
    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }
944
945    /// Checks if the elements of this vector are sorted.
946    pub fn is_sorted(&self) -> bool
947    where
948        T: PartialOrd,
949    {
950        self.as_slice().is_sorted()
951    }
952
953    /// Checks if the elements of this vector are sorted using the given comparator function.
954    pub fn is_sorted_by<F>(&self, compare: F) -> bool
955    where
956        F: FnMut(&T, &T) -> bool,
957    {
958        self.as_slice().is_sorted_by(compare)
959    }
960
961    /// Checks if the elements of this vector are sorted using the given key extraction function.
962    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
963    where
964        F: FnMut(&T) -> K,
965        K: PartialOrd,
966    {
967        self.as_slice().is_sorted_by_key(f)
968    }
969
970    /// Returns the index of the partition point according to the given predicate.
971    pub fn partition_point<P>(&self, pred: P) -> usize
972    where
973        P: FnMut(&T) -> bool,
974    {
975        self.as_slice().partition_point(pred)
976    }
977
978    /// Rotates the vector in-place such that the first `mid` elements move to the end.
979    pub fn rotate_left(&mut self, mid: usize) {
980        self.as_mut_slice().rotate_left(mid);
981    }
982
983    /// Rotates the vector in-place such that the last `k` elements move to the front.
984    pub fn rotate_right(&mut self, k: usize) {
985        self.as_mut_slice().rotate_right(k);
986    }
987
988    /// Creates a new `SegmentedVec` with at least the specified capacity.
989    pub fn with_capacity(capacity: usize) -> Self {
990        let mut vec = Self::new();
991        vec.reserve(capacity);
992        vec
993    }
994
    /// Inserts an element at position `index`, shifting all elements after it to the right.
    ///
    /// Runs in O(len - index): the element is pushed at the end and then
    /// bubbled down to `index` with pairwise swaps.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    pub fn insert(&mut self, index: usize, element: T) {
        assert!(index <= self.len);
        self.push(element);
        // Rotate the new element into place
        if index < self.len - 1 {
            for i in (index..self.len - 1).rev() {
                // SAFETY: i and i + 1 are both < self.len after the push, so
                // both raw pointers refer to distinct live elements.
                unsafe {
                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                    std::ptr::swap(ptr_a, ptr_b);
                }
            }
        }
    }
1014
    /// Removes and returns the element at position `index`, shifting all elements after it to the left.
    ///
    /// Runs in O(len - index): the element is bubbled to the end with pairwise
    /// swaps and then popped off.
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        // Shift elements left
        for i in index..self.len - 1 {
            // SAFETY: i and i + 1 are both < self.len, so both raw pointers
            // refer to distinct live elements.
            unsafe {
                let ptr_a = self.unchecked_at_mut(i) as *mut T;
                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        self.pop().unwrap()
    }
1032
    /// Removes an element from the vector and returns it.
    ///
    /// The removed element is replaced by the last element of the vector.
    /// This does not preserve ordering, but is O(1).
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    pub fn swap_remove(&mut self, index: usize) -> T {
        assert!(index < self.len);
        let last_index = self.len - 1;
        if index != last_index {
            // SAFETY: index and last_index are distinct and both < self.len,
            // so the pointers refer to distinct live elements.
            unsafe {
                let ptr_a = self.unchecked_at_mut(index) as *mut T;
                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
                std::ptr::swap(ptr_a, ptr_b);
            }
        }
        // The target value now sits at the end; pop it off.
        self.pop().unwrap()
    }
1053
1054    /// Retains only the elements specified by the predicate.
1055    ///
1056    /// Removes all elements for which `f(&element)` returns `false`.
1057    pub fn retain<F>(&mut self, mut f: F)
1058    where
1059        F: FnMut(&T) -> bool,
1060    {
1061        let mut i = 0;
1062        while i < self.len {
1063            if f(unsafe { self.unchecked_at(i) }) {
1064                i += 1;
1065            } else {
1066                self.remove(i);
1067            }
1068        }
1069    }
1070
1071    /// Retains only the elements specified by the predicate, passing a mutable reference.
1072    pub fn retain_mut<F>(&mut self, mut f: F)
1073    where
1074        F: FnMut(&mut T) -> bool,
1075    {
1076        let mut i = 0;
1077        while i < self.len {
1078            if f(unsafe { self.unchecked_at_mut(i) }) {
1079                i += 1;
1080            } else {
1081                self.remove(i);
1082            }
1083        }
1084    }
1085
1086    /// Removes consecutive duplicate elements.
1087    ///
1088    /// If the vector is sorted, this removes all duplicates.
1089    pub fn dedup(&mut self)
1090    where
1091        T: PartialEq,
1092    {
1093        self.dedup_by(|a, b| a == b);
1094    }
1095
    /// Removes consecutive elements that satisfy the given equality relation.
    ///
    /// `same_bucket` receives `(previously kept element, current candidate)`;
    /// when it returns `true` the candidate is dropped. Note this argument
    /// order is the reverse of `Vec::dedup_by`. Runs in O(len) using a
    /// two-cursor compaction.
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        if self.len <= 1 {
            return;
        }
        let mut write = 1;
        for read in 1..self.len {
            let should_keep = unsafe {
                // SAFETY: write - 1 < write <= read < self.len, so the two
                // indices differ and both slots hold live elements.
                let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
                let curr_ptr = self.unchecked_at_mut(read) as *mut T;
                !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
            };
            if should_keep {
                if read != write {
                    // SAFETY: write < read < self.len; the write slot holds
                    // either a live value or an already-dropped duplicate,
                    // and the swap moves the live value down into it.
                    unsafe {
                        let ptr_src = self.unchecked_at_mut(read) as *mut T;
                        let ptr_dst = self.unchecked_at_mut(write) as *mut T;
                        std::ptr::swap(ptr_dst, ptr_src);
                    }
                }
                write += 1;
            } else {
                // Drop the duplicate
                unsafe {
                    std::ptr::drop_in_place(self.unchecked_at_mut(read));
                }
            }
        }
        // NOTE(review): len is assigned directly rather than via truncate(),
        // because slots in [write, old_len) were already dropped or swapped
        // out and truncate() would double-drop them. Confirm the cached
        // write_ptr/segment_end are re-derived from len before the next push.
        self.len = write;
    }
1129
1130    /// Removes consecutive elements that map to the same key.
1131    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
1132    where
1133        F: FnMut(&mut T) -> K,
1134        K: PartialEq,
1135    {
1136        self.dedup_by(|a, b| key(a) == key(b));
1137    }
1138
1139    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1140    ///
1141    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1142    /// with each additional slot filled with `value`.
1143    /// If `new_len` is less than `len`, the vector is simply truncated.
1144    pub fn resize(&mut self, new_len: usize, value: T)
1145    where
1146        T: Clone,
1147    {
1148        if new_len > self.len {
1149            self.reserve(new_len - self.len);
1150            while self.len < new_len {
1151                self.push(value.clone());
1152            }
1153        } else {
1154            self.truncate(new_len);
1155        }
1156    }
1157
1158    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1159    ///
1160    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1161    /// with each additional slot filled with the result of calling the closure `f`.
1162    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
1163    where
1164        F: FnMut() -> T,
1165    {
1166        if new_len > self.len {
1167            self.reserve(new_len - self.len);
1168            while self.len < new_len {
1169                self.push(f());
1170            }
1171        } else {
1172            self.truncate(new_len);
1173        }
1174    }
1175
1176    /// Moves all the elements of `other` into `self`, leaving `other` empty.
1177    pub fn append(&mut self, other: &mut Self) {
1178        let other_len = other.len;
1179        self.reserve(other_len);
1180        let start = self.len;
1181        while let Some(item) = other.pop() {
1182            self.push(item);
1183        }
1184        // Reverse the appended portion since pop() returns in reverse order
1185        let mut left = start;
1186        let mut right = self.len;
1187        while left < right {
1188            right -= 1;
1189            if left < right {
1190                unsafe {
1191                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
1192                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
1193                    std::ptr::swap(ptr_a, ptr_b);
1194                }
1195                left += 1;
1196            }
1197        }
1198    }
1199
    /// Splits the vector into two at the given index.
    ///
    /// Returns a newly allocated vector containing the elements in the range `[at, len)`.
    /// After the call, the original vector will contain elements `[0, at)`.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    pub fn split_off(&mut self, at: usize) -> Self {
        assert!(at <= self.len);
        let mut other = Self::new();
        other.reserve(self.len - at);
        for i in at..self.len {
            // SAFETY: i < self.len and each index is read exactly once; the
            // moved-out slots are abandoned below by shrinking len to `at`.
            other.push(unsafe { self.unchecked_read(i) });
        }
        // NOTE(review): len is assigned directly (truncate() would double-drop
        // the moved-out values). Confirm the cached write_ptr/segment_end are
        // re-synchronized from len before the next push on `self`.
        self.len = at;
        other
    }
1218
1219    /// Returns an iterator over `chunk_size` elements of the vector at a time.
1220    ///
1221    /// # Panics
1222    ///
1223    /// Panics if `chunk_size` is 0.
1224    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T, PREALLOC> {
1225        self.as_slice().chunks(chunk_size)
1226    }
1227
1228    /// Returns an iterator over all contiguous windows of length `size`.
1229    ///
1230    /// # Panics
1231    ///
1232    /// Panics if `size` is 0.
1233    pub fn windows(&self, size: usize) -> Windows<'_, T, PREALLOC> {
1234        self.as_slice().windows(size)
1235    }
1236
1237    /// Returns an iterator over `chunk_size` elements at a time, starting from the end.
1238    ///
1239    /// # Panics
1240    ///
1241    /// Panics if `chunk_size` is 0.
1242    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T, PREALLOC> {
1243        self.as_slice().rchunks(chunk_size)
1244    }
1245
1246    /// Returns an iterator over exactly `chunk_size` elements at a time.
1247    ///
1248    /// # Panics
1249    ///
1250    /// Panics if `chunk_size` is 0.
1251    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T, PREALLOC> {
1252        self.as_slice().chunks_exact(chunk_size)
1253    }
1254
    /// Creates a draining iterator that removes the specified range and yields the removed items.
    ///
    /// The cursor starts at `range.start`; see the `Drain` type for what
    /// happens to unyielded elements when the iterator is dropped.
    ///
    /// # Panics
    ///
    /// Panics if the starting point is greater than the end point or if the end point is greater than the length.
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            index: range.start,
        }
    }
1269
1270    /// Copies the elements to a new `Vec<T>`.
1271    pub fn to_vec(&self) -> Vec<T>
1272    where
1273        T: Clone,
1274    {
1275        self.as_slice().to_vec()
1276    }
1277
1278    // --- Internal helper methods ---
1279
    /// Calculate the number of shelves needed for a given capacity.
    ///
    /// Shelf sizes double, so the count falls out of the position of the
    /// highest set bit of the biased capacity.
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        if PREALLOC == 0 {
            // Bias by the smallest shelf size, then count doublings past it.
            let val = box_count + Self::MIN_NON_ZERO_CAP;
            val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
        } else {
            // The inline prealloc block takes the place of the first segment.
            let val = box_count + PREALLOC;
            val.next_power_of_two().trailing_zeros() - Self::PREALLOC_EXP - 1
        }
    }
1294
    /// Calculate the size of a shelf at a given index.
    ///
    /// Shelves double in size: shelf `i` holds `base << i` elements, where
    /// the base is `MIN_NON_ZERO_CAP` without preallocation, or twice
    /// `PREALLOC` (a power of two) with it.
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        if PREALLOC == 0 {
            Self::MIN_NON_ZERO_CAP << shelf_index
        } else {
            1usize << (shelf_index + Self::PREALLOC_EXP + 1)
        }
    }
1304
    /// Calculate which shelf and box index a list index falls into.
    /// Returns (shelf_index, box_index).
    ///
    /// Adding `BIAS` aligns shelf boundaries with powers of two, so the shelf
    /// comes from the most significant bit and the box index is the remainder
    /// below that bit.
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        // Clear the most significant bit to get box_index
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }
1316
    /// Calculate shelf, box index, and remaining capacity in one go.
    /// Returns (shelf_index, box_index, segment_remaining).
    ///
    /// Same bit trick as `location`, plus the number of boxes left in the
    /// segment starting at `list_index`.
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        // segment_remaining = shelf_size - box_idx = (1 << msb) - box_idx
        //                   = (1 << msb) - (biased - (1 << msb))
        //                   = (2 << msb) - biased
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }
1331
    /// Get an unchecked reference to an element.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            if index < PREALLOC {
                // SAFETY (caller contract): index < len, so this inline slot
                // is initialized.
                &(*self.prealloc_segment.as_ptr())[index]
            } else {
                // SAFETY (caller contract): index < len implies the shelf is
                // allocated and the box within it holds a live element.
                let (shelf, box_idx) = Self::location(index);
                &*self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }
1352
    /// Get an unchecked mutable reference to an element.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            if index < PREALLOC {
                // SAFETY (caller contract): index < len, so this inline slot
                // is initialized; &mut self guarantees exclusivity.
                &mut (*self.prealloc_segment.as_mut_ptr())[index]
            } else {
                // SAFETY (caller contract): index < len implies the shelf is
                // allocated and the box within it holds a live element.
                let (shelf, box_idx) = Self::location(index);
                &mut *self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }
1373
    /// Read a value from an index.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`, and the value must not be read again.
    /// (The slot is left logically uninitialized; the caller is responsible for
    /// ensuring it is never dropped or observed afterwards.)
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            if index < PREALLOC {
                // SAFETY (caller contract): index < len and this is the only
                // read of the slot, so moving the value out is sound.
                std::ptr::read(&(*self.prealloc_segment.as_ptr())[index])
            } else {
                // SAFETY (caller contract): as above, for a dynamic shelf box.
                let (shelf, box_idx) = Self::location(index);
                std::ptr::read(
                    self.dynamic_segments
                        .get_unchecked(shelf)
                        .as_ptr()
                        .add(box_idx),
                )
            }
        }
    }
1395
1396    /// Grow capacity to accommodate at least `new_capacity` elements.
1397    fn grow_capacity(&mut self, new_capacity: usize) {
1398        let new_shelf_count = Self::shelf_count(new_capacity);
1399        let old_shelf_count = self.dynamic_segments.len() as u32;
1400
1401        if new_shelf_count > old_shelf_count {
1402            self.dynamic_segments
1403                .reserve((new_shelf_count - old_shelf_count) as usize);
1404
1405            for i in old_shelf_count..new_shelf_count {
1406                let size = Self::shelf_size(i);
1407                let layout = Layout::array::<T>(size).expect("Layout overflow");
1408                let ptr = if layout.size() == 0 {
1409                    NonNull::dangling()
1410                } else {
1411                    let ptr = unsafe { alloc::alloc(layout) };
1412                    NonNull::new(ptr as *mut T).expect("Allocation failed")
1413                };
1414                self.dynamic_segments.push(ptr);
1415            }
1416        }
1417    }
1418
    /// Compute total capacity given the number of dynamic shelves.
    ///
    /// Both branches are closed forms of the geometric series of shelf sizes
    /// (plus the inline prealloc block when present).
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        if shelf_count == 0 {
            PREALLOC
        } else if PREALLOC == 0 {
            // MIN_NON_ZERO_CAP * (2^shelf_count - 1)
            (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
        } else {
            // PREALLOC + 2*PREALLOC + 4*PREALLOC + ... collapses to this.
            (1usize << (Self::PREALLOC_EXP + 1 + shelf_count)) - PREALLOC
        }
    }
1430
    /// Shrink capacity to the minimum needed for `new_capacity` elements.
    ///
    /// Only whole unused shelves are released. Callers must ensure no live
    /// elements remain in the shelves being freed (i.e. len <= new_capacity).
    fn shrink_capacity(&mut self, new_capacity: usize) {
        if new_capacity <= PREALLOC {
            // Free all dynamic segments
            self.free_shelves(self.dynamic_segments.len() as u32, 0);
            self.dynamic_segments.clear();
            self.dynamic_segments.shrink_to_fit();
            return;
        }

        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.dynamic_segments.len() as u32;

        if new_shelf_count < old_shelf_count {
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.dynamic_segments.truncate(new_shelf_count as usize);
            self.dynamic_segments.shrink_to_fit();
        }
    }
1450
    /// Free shelves from `from_count` down to `to_count` (exclusive).
    ///
    /// Releases backing memory only; elements must already have been dropped.
    /// Zero-sized layouts were never actually allocated (dangling pointer in
    /// `grow_capacity`), so they are skipped here.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            if layout.size() > 0 {
                // SAFETY: this pointer was produced by alloc::alloc with this
                // exact layout (same shelf index => same size).
                unsafe {
                    alloc::dealloc(
                        self.dynamic_segments[i as usize].as_ptr() as *mut u8,
                        layout,
                    );
                }
            }
        }
    }
1466}
1467
impl<T, const PREALLOC: usize> Drop for SegmentedVec<T, PREALLOC> {
    /// Drops every element, then releases all dynamically allocated shelves.
    fn drop(&mut self) {
        // Drop all elements
        self.clear();
        // Free all dynamic segments
        self.free_shelves(self.dynamic_segments.len() as u32, 0);
    }
}
1476
// Element access used by the sort module; mirrors `unchecked_at`, but hands
// out raw pointers so the sort can swap elements without overlapping &mut.
impl<T, const PREALLOC: usize> sort::IndexedAccess<T> for SegmentedVec<T, PREALLOC> {
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        // SAFETY: callers must pass an in-bounds index (debug-checked above).
        unsafe { self.unchecked_at(index) }
    }

    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            // SAFETY: in-bounds index into the inline prealloc array.
            unsafe { (*self.prealloc_segment.as_ptr()).as_ptr().add(index) }
        } else {
            let (shelf, box_idx) = Self::location(index);
            // SAFETY: index < len implies the shelf is allocated and box_idx
            // is within it.
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            // SAFETY: in-bounds index into the inline prealloc array.
            unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(index)
            }
        } else {
            let (shelf, box_idx) = Self::location(index);
            // SAFETY: index < len implies the shelf is allocated and box_idx
            // is within it.
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        // SAFETY: a != b, so the two raw pointers address distinct live
        // elements and ptr::swap does not overlap.
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1533
impl<T, const PREALLOC: usize> Default for SegmentedVec<T, PREALLOC> {
    /// Returns an empty vector, identical to `Self::new()`.
    fn default() -> Self {
        Self::new()
    }
}
1539
impl<T, const PREALLOC: usize> Index<usize> for SegmentedVec<T, PREALLOC> {
    type Output = T;

    /// Bounds-checked indexing; panics with "index out of bounds" when
    /// `index >= len`.
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}
1547
impl<T, const PREALLOC: usize> IndexMut<usize> for SegmentedVec<T, PREALLOC> {
    /// Bounds-checked mutable indexing; panics with "index out of bounds"
    /// when `index >= len`.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}
1553
impl<T: Clone, const PREALLOC: usize> Clone for SegmentedVec<T, PREALLOC> {
    /// Deep-clones the vector segment by segment into a freshly allocated
    /// `SegmentedVec` with the same layout.
    fn clone(&self) -> Self {
        if self.len == 0 {
            return Self::new();
        }

        let mut new_vec = Self::new();
        // Allocate every shelf up front so the copies below write in place.
        new_vec.reserve(self.len);

        // Clone prealloc segment
        if PREALLOC > 0 {
            let count = self.len.min(PREALLOC);
            // SAFETY: the first `count` inline slots of `self` are initialized
            // (count <= self.len) and the destination slots are writable.
            let src = unsafe { (*self.prealloc_segment.as_ptr()).as_ptr() };
            let dst = unsafe { (*new_vec.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            for i in 0..count {
                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
            }
            // NOTE(review): len is bumped only after the whole segment is
            // cloned, so a panicking clone() leaks the clones written so far
            // (no double drop, but not exception-clean either).
            new_vec.len = count;
        }

        // Clone dynamic segments
        if self.len > PREALLOC {
            let mut remaining = self.len - PREALLOC;
            for shelf in 0..self.dynamic_segments.len() {
                let size = Self::shelf_size(shelf as u32);
                let count = remaining.min(size);
                // SAFETY: reserve(self.len) gave new_vec at least as many
                // shelves as self uses, and the first `count` boxes of this
                // shelf in `self` are initialized.
                let src = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                let dst = unsafe { new_vec.dynamic_segments.get_unchecked(shelf).as_ptr() };
                for i in 0..count {
                    unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
                }
                new_vec.len += count;
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        // Set up write pointer
        if new_vec.len < new_vec.capacity() {
            new_vec.init_write_ptr();
        } else {
            // NOTE(review): segment_base keeps whatever Self::new() set;
            // confirm push's slow path re-derives it when write_ptr is null.
            new_vec.write_ptr = std::ptr::null_mut();
            new_vec.segment_end = std::ptr::null_mut();
        }

        new_vec
    }
}
1604
1605impl<T: std::fmt::Debug, const PREALLOC: usize> std::fmt::Debug for SegmentedVec<T, PREALLOC> {
1606    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1607        f.debug_list().entries(self.iter()).finish()
1608    }
1609}
1610
1611impl<T: PartialEq, const PREALLOC: usize> PartialEq for SegmentedVec<T, PREALLOC> {
1612    fn eq(&self, other: &Self) -> bool {
1613        if self.len != other.len {
1614            return false;
1615        }
1616        for i in 0..self.len {
1617            if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1618                return false;
1619            }
1620        }
1621        true
1622    }
1623}
1624
1625impl<T: Eq, const PREALLOC: usize> Eq for SegmentedVec<T, PREALLOC> {}
1626
1627impl<T, const PREALLOC: usize> FromIterator<T> for SegmentedVec<T, PREALLOC> {
1628    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1629        let iter = iter.into_iter();
1630        let (lower, _) = iter.size_hint();
1631        let mut vec = Self::new();
1632        vec.reserve(lower);
1633        for item in iter {
1634            vec.push(item);
1635        }
1636        vec
1637    }
1638}
1639
1640impl<T, const PREALLOC: usize> Extend<T> for SegmentedVec<T, PREALLOC> {
1641    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1642        let iter = iter.into_iter();
1643        let (lower, _) = iter.size_hint();
1644        self.reserve(lower);
1645        for item in iter {
1646            self.push(item);
1647        }
1648    }
1649}
1650
1651// --- Iterator implementations ---
1652
/// An iterator over references to elements of a `SegmentedVec`.
///
/// Walks one segment at a time so the common case is a single pointer bump;
/// segment boundaries go through the slower `next_segment` path.
pub struct Iter<'a, T, const PREALLOC: usize> {
    vec: &'a SegmentedVec<T, PREALLOC>,
    /// Current pointer within segment
    ptr: *const T,
    /// End of current segment (min of segment capacity and vec.len)
    segment_end: *const T,
    /// Current logical index
    index: usize,
    /// Current shelf index (for dynamic segments)
    shelf_index: u32,
}
1665
impl<'a, T, const PREALLOC: usize> Iterator for Iter<'a, T, PREALLOC> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment.
        if self.ptr < self.segment_end {
            // SAFETY: ptr < segment_end, which never exceeds the initialized
            // portion of the segment.
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        // Slow path: advance to the next segment, or finish.
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: precisely len - index elements remain.
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1686
impl<'a, T, const PREALLOC: usize> Iter<'a, T, PREALLOC> {
    /// Cold path of `next`: position the cursor at the start of the segment
    /// containing `self.index` and yield that element, or `None` at the end.
    /// Assumes `index` sits at the first box of the next segment — upheld
    /// because `next` exhausts each segment before calling here.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        // Move to next segment
        if self.index < PREALLOC {
            // In prealloc segment
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_ptr())
                    .as_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            // Stop at vec.len if it falls inside this segment.
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // In dynamic segments
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the newly entered segment.
        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}
1723
// Sound because size_hint() is exact (both bounds equal len - index).
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Iter<'a, T, PREALLOC> {}
1725
/// An iterator over mutable references to elements of a `SegmentedVec`.
///
/// Holds an exclusive borrow of the vector; walks one segment at a time so
/// the common case is a single pointer bump.
pub struct IterMut<'a, T, const PREALLOC: usize> {
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    /// Current pointer within segment
    ptr: *mut T,
    /// End of current segment (min of segment capacity and vec.len)
    segment_end: *mut T,
    /// Current logical index
    index: usize,
    /// Current shelf index (for dynamic segments)
    shelf_index: u32,
}
1738
impl<'a, T, const PREALLOC: usize> Iterator for IterMut<'a, T, PREALLOC> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment.
        if self.ptr < self.segment_end {
            let result = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            // SAFETY: result < segment_end points at a live element, and the
            // cursor never revisits it, so handing out &'a mut is exclusive.
            return Some(unsafe { &mut *result });
        }
        // Slow path: advance to the next segment, or finish.
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: precisely len - index elements remain.
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1759
impl<'a, T, const PREALLOC: usize> IterMut<'a, T, PREALLOC> {
    /// Cold path of `next`: position the cursor at the start of the segment
    /// containing `self.index` and yield that element, or `None` at the end.
    /// Assumes `index` sits at the first box of the next segment — upheld
    /// because `next` exhausts each segment before calling here.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        // Move to next segment
        if self.index < PREALLOC {
            // In prealloc segment
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            // Stop at vec.len if it falls inside this segment.
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // In dynamic segments
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            // as_ptr() returns *mut T since NonNull<T> stores *mut T
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the newly entered segment.
        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        // SAFETY: result points at a live element the cursor never revisits.
        Some(unsafe { &mut *result })
    }
}
1797
// Sound because size_hint() is exact (both bounds equal len - index).
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for IterMut<'a, T, PREALLOC> {}
1799
impl<T, const PREALLOC: usize> IntoIterator for SegmentedVec<T, PREALLOC> {
    type Item = T;
    type IntoIter = IntoIter<T, PREALLOC>;

    /// Consumes the vector, yielding each element by value.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: self,
            index: 0,
        }
    }
}
1811
impl<'a, T, const PREALLOC: usize> IntoIterator for &'a SegmentedVec<T, PREALLOC> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T, PREALLOC>;

    /// Enables `for x in &vec`; equivalent to `vec.iter()`.
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
1820
1821impl<'a, T, const PREALLOC: usize> IntoIterator for &'a mut SegmentedVec<T, PREALLOC> {
1822    type Item = &'a mut T;
1823    type IntoIter = IterMut<'a, T, PREALLOC>;
1824
1825    fn into_iter(self) -> Self::IntoIter {
1826        self.iter_mut()
1827    }
1828}
1829
/// An owning iterator over elements of a `SegmentedVec`.
///
/// Elements that are not consumed by the iterator are dropped when the
/// `IntoIter` itself is dropped.
pub struct IntoIter<T, const PREALLOC: usize> {
    // The vector being consumed; keeps the segment storage alive while the
    // iterator moves values out of it.
    vec: SegmentedVec<T, PREALLOC>,
    // Index of the next element to move out; everything before it has
    // already been yielded (and must not be dropped again).
    index: usize,
}
1835
1836impl<T, const PREALLOC: usize> Iterator for IntoIter<T, PREALLOC> {
1837    type Item = T;
1838
1839    fn next(&mut self) -> Option<Self::Item> {
1840        if self.index >= self.vec.len {
1841            return None;
1842        }
1843        let value = unsafe { self.vec.unchecked_read(self.index) };
1844        self.index += 1;
1845        Some(value)
1846    }
1847
1848    fn size_hint(&self) -> (usize, Option<usize>) {
1849        let remaining = self.vec.len - self.index;
1850        (remaining, Some(remaining))
1851    }
1852}
1853
1854impl<T, const PREALLOC: usize> ExactSizeIterator for IntoIter<T, PREALLOC> {}
1855
1856impl<T, const PREALLOC: usize> Drop for IntoIter<T, PREALLOC> {
1857    fn drop(&mut self) {
1858        // Drop remaining elements that weren't consumed
1859        for i in self.index..self.vec.len {
1860            unsafe {
1861                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1862            }
1863        }
1864        // Prevent the Vec from dropping elements again
1865        self.vec.len = 0;
1866    }
1867}
1868
/// A draining iterator for `SegmentedVec`.
///
/// This struct is created by the `drain` method on `SegmentedVec`.
///
/// When the `Drain` is dropped, any un-yielded elements of the range are
/// dropped and the elements after the range are shifted down to close the gap.
pub struct Drain<'a, T, const PREALLOC: usize> {
    // The vector being drained; elements are moved out as the iterator runs
    // and the tail is compacted in `Drop`.
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    // First index of the drained range (inclusive); fixed for the lifetime
    // of the iterator.
    range_start: usize,
    // One past the last not-yet-consumed index of the range; decremented by
    // `next_back`.
    range_end: usize,
    // Front cursor: the next index `next` will read. Presumably initialized
    // to `range_start` by `drain` — the constructor is outside this view.
    index: usize,
}
1878
1879impl<'a, T, const PREALLOC: usize> Iterator for Drain<'a, T, PREALLOC> {
1880    type Item = T;
1881
1882    fn next(&mut self) -> Option<Self::Item> {
1883        if self.index >= self.range_end {
1884            None
1885        } else {
1886            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1887            self.index += 1;
1888            Some(value)
1889        }
1890    }
1891
1892    fn size_hint(&self) -> (usize, Option<usize>) {
1893        let remaining = self.range_end - self.index;
1894        (remaining, Some(remaining))
1895    }
1896}
1897
1898impl<'a, T, const PREALLOC: usize> DoubleEndedIterator for Drain<'a, T, PREALLOC> {
1899    fn next_back(&mut self) -> Option<Self::Item> {
1900        if self.index >= self.range_end {
1901            None
1902        } else {
1903            self.range_end -= 1;
1904            Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1905        }
1906    }
1907}
1908
1909impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Drain<'a, T, PREALLOC> {}
1910
1911impl<'a, T, const PREALLOC: usize> Drop for Drain<'a, T, PREALLOC> {
1912    fn drop(&mut self) {
1913        // Drop any remaining elements in the range
1914        for i in self.index..self.range_end {
1915            unsafe {
1916                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1917            }
1918        }
1919
1920        // Shift elements after the range to fill the gap
1921        let original_range_end = self.range_end;
1922        let original_len = self.vec.len;
1923        let drain_count = original_range_end - self.range_start;
1924
1925        for i in 0..(original_len - original_range_end) {
1926            unsafe {
1927                let src = self.vec.unchecked_at(original_range_end + i) as *const T;
1928                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
1929                std::ptr::copy_nonoverlapping(src, dst, 1);
1930            }
1931        }
1932
1933        self.vec.len = original_len - drain_count;
1934    }
1935}
1936
#[cfg(test)]
mod tests {
    //! Unit tests covering construction, push/pop, iteration, draining,
    //! sorting, slicing, and drop behavior of `SegmentedVec`.
    //!
    //! Improvement: added `test_drain*` — `Drain` is a public iterator that
    //! previously had no coverage at all.

    use super::*;

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    #[test]
    fn test_stable_pointers() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        // Push many more elements to trigger segment allocations
        for i in 2..1000 {
            vec.push(i);
        }

        // The pointer should still be valid
        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32, 4> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_drain_fully_consumed() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let drained: Vec<i32> = vec.drain(2..5).collect();
        assert_eq!(drained, vec![2, 3, 4]);

        // The gap is closed and the length updated.
        assert_eq!(vec.len(), 7);
        let remaining: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(remaining, vec![0, 1, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_drain_unconsumed() {
        // Dropping the Drain without iterating must still remove the range.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.drain(2..5);
        assert_eq!(vec.len(), 7);
        let remaining: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(remaining, vec![0, 1, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_drain_partially_consumed() {
        // Consuming only part of the range; Drop handles the rest.
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        {
            let mut drain = vec.drain(2..6);
            assert_eq!(drain.next(), Some(2));
            assert_eq!(drain.next(), Some(3));
            // 4 and 5 are dropped by Drain::drop
        }
        assert_eq!(vec.len(), 6);
        let remaining: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(remaining, vec![0, 1, 6, 7, 8, 9]);
    }

    #[test]
    fn test_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        assert_eq!(vec.pop(), Some(99));
        assert_eq!(vec.len(), 99);
    }

    #[test]
    fn test_various_prealloc_sizes() {
        fn test_prealloc<const N: usize>() {
            let mut vec: SegmentedVec<i32, N> = SegmentedVec::new();
            for i in 0..100 {
                vec.push(i);
            }
            for i in 0..100 {
                assert_eq!(vec[i], i as i32);
            }
        }

        test_prealloc::<0>();
        test_prealloc::<1>();
        test_prealloc::<2>();
        test_prealloc::<4>();
        test_prealloc::<8>();
        test_prealloc::<16>();
    }

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter, 4> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_by(|a, b| b.cmp(a)); // reverse order
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    #[test]
    fn test_sort_stable() {
        // Test that sort is stable (equal elements maintain their relative order)
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item, 4> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        // All items with key=1 should come first, in original order
        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        // Then items with key=2, in original order
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        // Just verify it's sorted by absolute value (unstable may reorder equal elements)
        let result: Vec<i32> = vec.iter().copied().collect();
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        // Add numbers in reverse order
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        // Add numbers in reverse order
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_with_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        // Verify that after sorting, pointers to elements are still valid
        // (they point to different values, but the memory locations are stable)
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        // Get pointer to first element before sort
        let ptr = &vec[0] as *const i32;

        vec.sort();

        // After sort, the pointer still points to valid memory (now contains sorted value)
        assert_eq!(unsafe { *ptr }, 1); // First element after sort is 1
    }

    // --- Slice tests ---

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        // Sort only the middle part
        {
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_min_non_zero_cap() {
        // For u8 (1 byte), first segment should be 8 elements
        let mut vec_u8: SegmentedVec<u8, 0> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        // For i32 (4 bytes, <= 1024), first segment should be 4 elements
        let mut vec_i32: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        // Verify indexing still works correctly with the larger first segment
        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        // Test with PREALLOC=0
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Test with PREALLOC=4, starting empty
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Test with PREALLOC=4, starting with some elements
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        // Test empty slice
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        // Test extend to non-boundary, then push (regression test)
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]); // 3 elements, not at boundary
        assert_eq!(vec.len(), 3);
        vec.push(4); // Should use fast path, not crash in push_slow
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }

        // Test extend to exact capacity boundary, then push
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3, 4]); // Fills prealloc exactly
        assert_eq!(vec.len(), 4);
        vec.push(5); // Should correctly allocate new segment
        assert_eq!(vec.len(), 5);
        assert_eq!(vec[4], 5);
    }
}