// segmented_vec — lib.rs

1//! A segmented growable vector with stable pointers.
2//!
3//! Unlike `Vec`, pushing new elements never invalidates pointers to existing elements.
4//! This is achieved by storing elements in segments of exponentially growing sizes.
5//!
6//! # Example
7//!
8//! ```
9//! use segmented_vec::SegmentedVec;
10//!
11//! let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
12//! vec.push(1);
13//! vec.push(2);
14//!
15//! // Get a pointer to the first element
16//! let ptr = &vec[0] as *const i32;
17//!
18//! // Push more elements - the pointer remains valid!
19//! for i in 3..100 {
20//!     vec.push(i);
21//! }
22//!
23//! // The pointer is still valid
24//! assert_eq!(unsafe { *ptr }, 1);
25//! ```
26
27mod slice;
28mod sort;
29
30pub use slice::{
31    Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32    Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::mem::MaybeUninit;
39use std::ops::{Index, IndexMut};
40use std::ptr::NonNull;
41
/// A segmented vector with stable pointers.
///
/// `PREALLOC` specifies the number of elements to store inline (must be 0 or a power of 2).
/// Elements beyond the preallocation are stored in dynamically allocated segments.
pub struct SegmentedVec<T, const PREALLOC: usize = 0> {
    /// Inline storage for the first `PREALLOC` elements; only the first
    /// `len.min(PREALLOC)` slots are initialized.
    prealloc_segment: MaybeUninit<[T; PREALLOC]>,
    /// Base pointers of the heap-allocated segments ("shelves"), in order of
    /// increasing size.
    dynamic_segments: Vec<NonNull<T>>,
    /// Total number of initialized elements across all segments.
    len: usize,
    /// Cached pointer to the next write position (for fast push)
    write_ptr: *mut T,
    /// One-past-the-end pointer of the segment `write_ptr` points into; push
    /// takes the fast path while `write_ptr < segment_end`. (Despite the
    /// name, this is a pointer, not an element count.)
    segment_end: *mut T,
    /// Ties the raw pointers' element type to `T` for variance/drop-check.
    _marker: PhantomData<T>,
}
56
// SAFETY: the raw cache pointers (`write_ptr`/`segment_end`) are what
// suppress the auto-impls; they only ever point into storage owned by this
// value, so the vector can move to another thread whenever T: Send.
unsafe impl<T: Send, const PREALLOC: usize> Send for SegmentedVec<T, PREALLOC> {}
// SAFETY: shared access only hands out &T (the cache pointers are not
// dereferenced through &self-only APIs in a way that mutates), so sharing
// across threads is safe whenever T: Sync.
unsafe impl<T: Sync, const PREALLOC: usize> Sync for SegmentedVec<T, PREALLOC> {}
61
62impl<T, const PREALLOC: usize> SegmentedVec<T, PREALLOC> {
    /// log2(PREALLOC), or 0 when PREALLOC == 0.
    ///
    /// Evaluating this constant also validates the const parameter at compile
    /// time: a non-power-of-two PREALLOC fails the assert.
    const PREALLOC_EXP: u32 = if PREALLOC == 0 {
        0
    } else {
        assert!(
            PREALLOC.is_power_of_two(),
            "PREALLOC must be 0 or a power of 2"
        );
        // A power of two has exactly one set bit, so trailing_zeros is its
        // base-2 logarithm.
        PREALLOC.trailing_zeros()
    };

    /// Minimum capacity for the first dynamic segment when PREALLOC=0.
    /// Avoids tiny allocations that heap allocators round up anyway.
    /// - 8 for 1-byte elements (allocators round up small requests)
    /// - 4 for moderate elements (<= 1 KiB)
    /// - 1 for large elements (avoid wasting space)
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    /// log2(MIN_NON_ZERO_CAP); all values above are powers of two (8, 4, 1).
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();

    /// Added to a logical index before the power-of-two segment math:
    /// `biased = index + BIAS` makes segment boundaries fall exactly on
    /// powers of two, so boundary checks and shelf lookup reduce to bit ops.
    const BIAS: usize = if PREALLOC > 0 {
        PREALLOC
    } else {
        Self::MIN_NON_ZERO_CAP
    };

    /// Offset to subtract from MSB to get shelf index.
    const SHELF_OFFSET: u32 = if PREALLOC == 0 {
        Self::MIN_CAP_EXP
    } else {
        Self::PREALLOC_EXP + 1
    };
103
104    /// Creates a new empty `SegmentedVec`.
105    ///
106    /// # Example
107    ///
108    /// ```
109    /// use segmented_vec::SegmentedVec;
110    /// let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
111    /// assert!(vec.is_empty());
112    /// ```
113    #[inline]
114    pub const fn new() -> Self {
115        Self {
116            prealloc_segment: MaybeUninit::uninit(),
117            dynamic_segments: Vec::new(),
118            len: 0,
119            write_ptr: std::ptr::null_mut(),
120            segment_end: std::ptr::null_mut(),
121            _marker: PhantomData,
122        }
123    }
124
    /// Initialize or update the write pointer cache.
    ///
    /// Derives `write_ptr`/`segment_end` from `self.len` so the next `push`
    /// writes into the slot for index `len`. When the dynamic branch is
    /// taken, the segment containing index `len` must already be allocated.
    #[inline]
    fn init_write_ptr(&mut self) {
        if PREALLOC > 0 && self.len < PREALLOC {
            // Next write lands inside the inline prealloc segment.
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = unsafe { base.add(self.len) };
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else if !self.dynamic_segments.is_empty() {
            // Next write lands in a dynamic shelf; locate it from `len`.
            let (shelf, box_idx) = Self::location(self.len);
            let shelf_size = Self::shelf_size(shelf as u32);
            let base = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
            self.write_ptr = unsafe { base.add(box_idx) };
            self.segment_end = unsafe { base.add(shelf_size) };
        } else if PREALLOC > 0 {
            // Defensive reset to the start of the inline segment.
            // NOTE(review): if this branch were ever reached with
            // len == PREALLOC (inline segment full, no shelves), the cache
            // would point at occupied slots; visible callers never reach that
            // state — confirm before relying on this branch.
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            // Nothing allocated yet; a null cache forces push's slow path.
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
147
148    /// Returns the number of elements in the vector.
149    #[inline]
150    pub const fn len(&self) -> usize {
151        self.len
152    }
153
154    /// Returns `true` if the vector contains no elements.
155    #[inline]
156    pub const fn is_empty(&self) -> bool {
157        self.len == 0
158    }
159
160    /// Returns the current capacity of the vector.
161    #[inline]
162    pub fn capacity(&self) -> usize {
163        Self::compute_capacity(self.dynamic_segments.len() as u32)
164    }
165
    /// Appends an element to the back of the vector.
    ///
    /// # Panics
    ///
    /// Panics if allocation fails.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.push(1);
    /// vec.push(2);
    /// assert_eq!(vec.len(), 2);
    /// ```
    #[inline]
    pub fn push(&mut self, value: T) {
        // Fast path: we have space in the current segment.
        // The cache is kept in sync with `len`; on a fresh vector both
        // pointers are null, which fails this comparison and routes the
        // first push through the slow path.
        if self.write_ptr < self.segment_end {
            unsafe {
                // SAFETY: write_ptr points at the uninitialized slot for
                // index `len` within the cached segment.
                std::ptr::write(self.write_ptr, value);
                self.write_ptr = self.write_ptr.add(1);
            }
            self.len += 1;
            return;
        }

        // Slow path: need to grow or move to next segment
        self.push_slow(value);
    }
196
    /// Slow path for `push`: runs when the cached segment is full or the
    /// cache is null. Allocates the next shelf if necessary and re-primes
    /// the write-pointer cache.
    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        let new_len = self.len + 1;

        // Special case: first push into prealloc segment
        // (a fresh vector has a null cache, so its first push lands here).
        if PREALLOC > 0 && self.len == 0 {
            unsafe {
                let base = (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr();
                std::ptr::write(base, value);
                self.write_ptr = base.add(1);
                self.segment_end = base.add(PREALLOC);
            }
            self.len = 1;
            return;
        }

        // We're at a segment boundary (box_index = 0), so biased = len + BIAS is a power of 2
        // Calculate shelf directly without full location()
        let biased = self.len + Self::BIAS;
        let shelf = (biased.trailing_zeros() - Self::SHELF_OFFSET) as usize;
        let shelf_size = Self::shelf_size(shelf as u32);

        // Ensure `dynamic_segments[shelf]` exists (expected to be a no-op
        // when capacity is already sufficient — confirm in grow_capacity).
        self.grow_capacity(new_len);

        unsafe {
            // SAFETY: grow_capacity guaranteed shelf `shelf` is allocated and
            // we are writing its first (index 0) slot.
            let base = self.dynamic_segments.get_unchecked(shelf).as_ptr();
            std::ptr::write(base, value);
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len = new_len;
    }
230
    /// Removes the last element from the vector and returns it, or `None` if empty.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.push(1);
    /// vec.push(2);
    /// assert_eq!(vec.pop(), Some(2));
    /// assert_eq!(vec.pop(), Some(1));
    /// assert_eq!(vec.pop(), None);
    /// ```
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            return None;
        }
        self.len -= 1;

        // Check if we're at a segment boundary (biased len is a power of 2)
        // using the classic power-of-two bit trick.
        let biased = self.len + Self::BIAS;
        if biased & (biased - 1) == 0 {
            // Crossed a segment boundary, recalculate. This branch also
            // covers the case where the cache was nulled because the vector
            // was exactly full (len == capacity is always a boundary).
            self.init_write_ptr();
            // SAFETY: index `len` (the popped slot) is initialized; the read
            // moves the value out without dropping it in place.
            Some(unsafe { self.unchecked_read(self.len) })
        } else {
            // Same segment, just decrement write_ptr
            unsafe {
                self.write_ptr = self.write_ptr.sub(1);
            }
            // SAFETY: write_ptr now points at the popped element's slot.
            Some(unsafe { std::ptr::read(self.write_ptr) })
        }
    }
265
266    /// Returns a reference to the element at the given index.
267    ///
268    /// Returns `None` if the index is out of bounds.
269    #[inline]
270    pub fn get(&self, index: usize) -> Option<&T> {
271        if index < self.len {
272            Some(unsafe { self.unchecked_at(index) })
273        } else {
274            None
275        }
276    }
277
278    /// Returns a mutable reference to the element at the given index.
279    ///
280    /// Returns `None` if the index is out of bounds.
281    #[inline]
282    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
283        if index < self.len {
284            Some(unsafe { self.unchecked_at_mut(index) })
285        } else {
286            None
287        }
288    }
289
290    /// Returns a reference to the first element, or `None` if empty.
291    #[inline]
292    pub fn first(&self) -> Option<&T> {
293        self.get(0)
294    }
295
296    /// Returns a mutable reference to the first element, or `None` if empty.
297    #[inline]
298    pub fn first_mut(&mut self) -> Option<&mut T> {
299        self.get_mut(0)
300    }
301
302    /// Returns a reference to the last element, or `None` if empty.
303    #[inline]
304    pub fn last(&self) -> Option<&T> {
305        if self.len == 0 {
306            None
307        } else {
308            self.get(self.len - 1)
309        }
310    }
311
312    /// Returns a mutable reference to the last element, or `None` if empty.
313    #[inline]
314    pub fn last_mut(&mut self) -> Option<&mut T> {
315        if self.len == 0 {
316            None
317        } else {
318            self.get_mut(self.len - 1)
319        }
320    }
321
322    /// Returns `true` if the slice contains an element with the given value.
323    #[inline]
324    pub fn contains(&self, x: &T) -> bool
325    where
326        T: PartialEq,
327    {
328        self.as_slice().contains(x)
329    }
330
331    /// Returns `true` if `needle` is a prefix of the vector.
332    pub fn starts_with(&self, needle: &[T]) -> bool
333    where
334        T: PartialEq,
335    {
336        self.as_slice().starts_with(needle)
337    }
338
339    /// Returns `true` if `needle` is a suffix of the vector.
340    pub fn ends_with(&self, needle: &[T]) -> bool
341    where
342        T: PartialEq,
343    {
344        self.as_slice().ends_with(needle)
345    }
346
347    /// Binary searches this vector for a given element.
348    ///
349    /// If the value is found, returns `Ok(index)`. If not found, returns
350    /// `Err(index)` where `index` is the position where the element could be inserted.
351    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
352    where
353        T: Ord,
354    {
355        self.as_slice().binary_search(x)
356    }
357
358    /// Binary searches this vector with a comparator function.
359    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
360    where
361        F: FnMut(&T) -> Ordering,
362    {
363        self.as_slice().binary_search_by(f)
364    }
365
366    /// Binary searches this vector with a key extraction function.
367    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
368    where
369        F: FnMut(&T) -> B,
370        B: Ord,
371    {
372        self.as_slice().binary_search_by_key(b, f)
373    }
374
375    /// Swaps two elements in the vector.
376    ///
377    /// # Panics
378    ///
379    /// Panics if `a` or `b` are out of bounds.
380    #[inline]
381    pub fn swap(&mut self, a: usize, b: usize) {
382        self.as_mut_slice().swap(a, b)
383    }
384
385    /// Reverses the order of elements in the vector.
386    pub fn reverse(&mut self) {
387        self.as_mut_slice().reverse()
388    }
389
390    /// Fills the vector with the given value.
391    pub fn fill(&mut self, value: T)
392    where
393        T: Clone,
394    {
395        self.as_mut_slice().fill(value)
396    }
397
398    /// Fills the vector with values produced by a function.
399    pub fn fill_with<F>(&mut self, f: F)
400    where
401        F: FnMut() -> T,
402    {
403        self.as_mut_slice().fill_with(f)
404    }
405
    /// Clears the vector, removing all elements.
    ///
    /// This does not deallocate the dynamic segments.
    pub fn clear(&mut self) {
        if self.len == 0 {
            return;
        }

        // Only run destructors when T actually has any (free for e.g. ints).
        if std::mem::needs_drop::<T>() {
            // Drop elements in prealloc segment
            if PREALLOC > 0 {
                let count = self.len.min(PREALLOC);
                // SAFETY: the first `count` inline slots are initialized.
                let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
                unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
            }

            // Drop elements in dynamic segments
            if self.len > PREALLOC {
                let mut remaining = self.len - PREALLOC;
                for shelf in 0..self.dynamic_segments.len() {
                    let size = Self::shelf_size(shelf as u32);
                    let count = remaining.min(size);
                    // SAFETY: shelves fill in order, so the first `count`
                    // slots of this shelf are initialized.
                    let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                    unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
                    remaining -= count;
                    if remaining == 0 {
                        break;
                    }
                }
            }
        }

        self.len = 0;
        // Reset write pointer cache
        if PREALLOC > 0 {
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            // Segments are retained but the cache is nulled; the next push
            // re-primes it via the slow path.
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
449
    /// Shortens the vector, keeping the first `new_len` elements.
    ///
    /// If `new_len` is greater than or equal to the current length, this has no effect.
    /// Excess elements are dropped in place; no segments are deallocated.
    pub fn truncate(&mut self, new_len: usize) {
        if new_len >= self.len {
            return;
        }

        if std::mem::needs_drop::<T>() {
            // Drop elements in prealloc segment (from new_len to min(self.len, PREALLOC))
            if PREALLOC > 0 && new_len < PREALLOC {
                let start = new_len;
                let end = self.len.min(PREALLOC);
                if start < end {
                    // SAFETY: inline slots start..end are initialized
                    // because start < end <= len.
                    let ptr = unsafe {
                        (*self.prealloc_segment.as_mut_ptr())
                            .as_mut_ptr()
                            .add(start)
                    };
                    unsafe {
                        std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, end - start))
                    };
                }
            }

            // Drop elements in dynamic segments
            if self.len > PREALLOC {
                // Translate both lengths into dynamic coordinates, where
                // index 0 is the first element after the prealloc segment.
                let dynamic_new_len = new_len.saturating_sub(PREALLOC);
                let dynamic_old_len = self.len - PREALLOC;

                if dynamic_new_len < dynamic_old_len {
                    let mut pos = 0usize;
                    for shelf in 0..self.dynamic_segments.len() {
                        let size = Self::shelf_size(shelf as u32);
                        let seg_end = pos + size;

                        // Calculate overlap with [dynamic_new_len, dynamic_old_len)
                        let drop_start = dynamic_new_len.max(pos);
                        let drop_end = dynamic_old_len.min(seg_end);

                        if drop_start < drop_end {
                            let offset = drop_start - pos;
                            let count = drop_end - drop_start;
                            // SAFETY: the overlap lies below the old length
                            // and within this shelf, so it is initialized.
                            let ptr = unsafe {
                                self.dynamic_segments
                                    .get_unchecked(shelf)
                                    .as_ptr()
                                    .add(offset)
                            };
                            unsafe {
                                std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count))
                            };
                        }

                        pos = seg_end;
                        if pos >= dynamic_old_len {
                            break;
                        }
                    }
                }
            }
        }

        self.len = new_len;
        // Update write pointer cache
        if new_len > 0 {
            self.init_write_ptr();
        } else if PREALLOC > 0 {
            // Empty again: next write goes to the start of the inline segment.
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            // Empty with no inline storage: null cache forces push's slow path.
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
526
527    /// Reserves capacity for at least `additional` more elements.
528    pub fn reserve(&mut self, additional: usize) {
529        let old_capacity = self.capacity();
530        self.grow_capacity(self.len + additional);
531        // Initialize write pointer if we didn't have capacity before
532        if old_capacity == 0 && self.capacity() > 0 && self.segment_end.is_null() {
533            self.init_write_ptr();
534        }
535    }
536
    /// Shrinks the capacity to match the current length.
    ///
    /// This deallocates unused dynamic segments. The inline preallocation
    /// (if any) is part of the vector's own storage and is never released.
    pub fn shrink_to_fit(&mut self) {
        self.shrink_capacity(self.len);
    }
543
    /// Returns an iterator over references to the elements.
    ///
    /// The iterator is lazily primed: the segment pointers start null and
    /// are set up on the first `next()` call.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T, PREALLOC> {
        // Initialize with null pointers - first next() call will set up the segment
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Returns an iterator over mutable references to the elements.
    ///
    /// The iterator is lazily primed: the segment pointers start null and
    /// are set up on the first `next()` call.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T, PREALLOC> {
        // Initialize with null pointers - first next() call will set up the segment
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }
569
    /// Returns an immutable slice view of the entire vector.
    ///
    /// The view borrows the vector; no elements are copied.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let slice = vec.as_slice();
    /// assert_eq!(slice.len(), 10);
    /// assert_eq!(slice[0], 0);
    /// ```
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T, PREALLOC> {
        SegmentedSlice::new(self)
    }

    /// Returns a mutable slice view of the entire vector.
    ///
    /// The view borrows the vector mutably; no elements are copied.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let mut slice = vec.as_mut_slice();
    /// slice[0] = 100;
    /// assert_eq!(vec[0], 100);
    /// ```
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T, PREALLOC> {
        SegmentedSliceMut::new(self)
    }
607
608    /// Returns an immutable slice view of a range of the vector.
609    ///
610    /// # Panics
611    ///
612    /// Panics if the range is out of bounds.
613    ///
614    /// # Example
615    ///
616    /// ```
617    /// use segmented_vec::SegmentedVec;
618    ///
619    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
620    /// vec.extend(0..10);
621    ///
622    /// let slice = vec.slice(2..5);
623    /// assert_eq!(slice.len(), 3);
624    /// assert_eq!(slice[0], 2);
625    /// ```
626    #[inline]
627    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T, PREALLOC> {
628        assert!(range.start <= range.end && range.end <= self.len);
629        SegmentedSlice::from_range(self, range.start, range.end)
630    }
631
632    /// Returns a mutable slice view of a range of the vector.
633    ///
634    /// # Panics
635    ///
636    /// Panics if the range is out of bounds.
637    ///
638    /// # Example
639    ///
640    /// ```
641    /// use segmented_vec::SegmentedVec;
642    ///
643    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
644    /// vec.extend(0..10);
645    ///
646    /// let mut slice = vec.slice_mut(2..5);
647    /// slice[0] = 100;
648    /// assert_eq!(vec[2], 100);
649    /// ```
650    #[inline]
651    pub fn slice_mut(
652        &mut self,
653        range: std::ops::Range<usize>,
654    ) -> SegmentedSliceMut<'_, T, PREALLOC> {
655        assert!(range.start <= range.end && range.end <= self.len);
656        SegmentedSliceMut::from_range(self, range.start, range.end)
657    }
658
659    /// Extends the vector by cloning elements from a slice.
660    pub fn extend_from_slice(&mut self, other: &[T])
661    where
662        T: Clone,
663    {
664        if other.is_empty() {
665            return;
666        }
667        self.reserve(other.len());
668
669        let mut src = other;
670
671        // Fill prealloc segment if there's room
672        if PREALLOC > 0 && self.len < PREALLOC {
673            let prealloc_remaining = PREALLOC - self.len;
674            let to_copy = src.len().min(prealloc_remaining);
675            let dest = unsafe {
676                (*self.prealloc_segment.as_mut_ptr())
677                    .as_mut_ptr()
678                    .add(self.len)
679            };
680            for i in 0..to_copy {
681                unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
682            }
683            self.len += to_copy;
684            src = &src[to_copy..];
685        }
686
687        // Fill dynamic segments
688        while !src.is_empty() {
689            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
690            let to_copy = src.len().min(remaining);
691            let dest = unsafe {
692                self.dynamic_segments
693                    .get_unchecked(shelf)
694                    .as_ptr()
695                    .add(box_idx)
696            };
697            for i in 0..to_copy {
698                unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
699            }
700            self.len += to_copy;
701            src = &src[to_copy..];
702        }
703
704        // Set up write pointer cache for next push
705        if self.len < self.capacity() {
706            self.init_write_ptr();
707        } else {
708            self.write_ptr = std::ptr::null_mut();
709            self.segment_end = std::ptr::null_mut();
710        }
711    }
712
    /// Extends the vector by copying elements from a slice (for `Copy` types).
    ///
    /// This is more efficient than `extend_from_slice` for `Copy` types
    /// as it uses bulk memory copy operations. Because a bitwise copy cannot
    /// panic, deferring the `len` update until after each segment copy is
    /// safe here.
    pub fn extend_from_copy_slice(&mut self, other: &[T])
    where
        T: Copy,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        // Fill prealloc segment if there's room
        if PREALLOC > 0 && self.len < PREALLOC {
            let prealloc_remaining = PREALLOC - self.len;
            let to_copy = src.len().min(prealloc_remaining);
            // SAFETY: to_copy <= prealloc_remaining, so the copy stays within
            // the inline segment; src and dest cannot overlap (dest is owned
            // storage being initialized).
            unsafe {
                let dest = (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Fill dynamic segments
        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            // SAFETY: reserve() above allocated this shelf, and
            // to_copy <= the shelf's remaining capacity.
            unsafe {
                let dest = self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Set up write pointer cache for next push
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
766
767    /// Sorts the vector in place using a stable sorting algorithm.
768    ///
769    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
770    ///
771    /// The algorithm is a merge sort adapted from the Rust standard library.
772    ///
773    /// # Examples
774    ///
775    /// ```
776    /// use segmented_vec::SegmentedVec;
777    ///
778    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
779    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
780    /// vec.sort();
781    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
782    /// ```
783    #[inline]
784    pub fn sort(&mut self)
785    where
786        T: Ord,
787    {
788        self.as_mut_slice().sort();
789    }
790
791    /// Sorts the vector in place with a comparator function.
792    ///
793    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
794    ///
795    /// The comparator function must define a total ordering for the elements.
796    ///
797    /// # Examples
798    ///
799    /// ```
800    /// use segmented_vec::SegmentedVec;
801    ///
802    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
803    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
804    /// vec.sort_by(|a, b| b.cmp(a)); // reverse order
805    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
806    /// ```
807    pub fn sort_by<F>(&mut self, compare: F)
808    where
809        F: FnMut(&T, &T) -> Ordering,
810    {
811        self.as_mut_slice().sort_by(compare);
812    }
813
814    /// Sorts the vector in place with a key extraction function.
815    ///
816    /// This sort is stable (i.e., does not reorder equal elements) and O(m * n * log(n)) worst-case,
817    /// where the key function is O(m).
818    ///
819    /// # Examples
820    ///
821    /// ```
822    /// use segmented_vec::SegmentedVec;
823    ///
824    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
825    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
826    /// vec.sort_by_key(|k| k.abs());
827    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, -3, -4, 5, 6, -9]);
828    /// ```
829    #[inline]
830    pub fn sort_by_key<K, F>(&mut self, f: F)
831    where
832        F: FnMut(&T) -> K,
833        K: Ord,
834    {
835        self.as_mut_slice().sort_by_key(f);
836    }
837
838    /// Sorts the vector in place using an unstable sorting algorithm.
839    ///
840    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
841    ///
842    /// The algorithm is a quicksort with heapsort fallback, adapted from the Rust standard library.
843    ///
844    /// # Examples
845    ///
846    /// ```
847    /// use segmented_vec::SegmentedVec;
848    ///
849    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
850    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
851    /// vec.sort_unstable();
852    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
853    /// ```
854    #[inline]
855    pub fn sort_unstable(&mut self)
856    where
857        T: Ord,
858    {
859        self.as_mut_slice().sort_unstable();
860    }
861
862    /// Sorts the vector in place with a comparator function using an unstable sorting algorithm.
863    ///
864    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
865    ///
866    /// The comparator function must define a total ordering for the elements.
867    ///
868    /// # Examples
869    ///
870    /// ```
871    /// use segmented_vec::SegmentedVec;
872    ///
873    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
874    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
875    /// vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
876    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
877    /// ```
878    pub fn sort_unstable_by<F>(&mut self, compare: F)
879    where
880        F: FnMut(&T, &T) -> Ordering,
881    {
882        self.as_mut_slice().sort_unstable_by(compare);
883    }
884
885    /// Sorts the vector in place with a key extraction function using an unstable sorting algorithm.
886    ///
887    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case,
888    /// where the key function is O(m).
889    ///
890    /// # Examples
891    ///
892    /// ```
893    /// use segmented_vec::SegmentedVec;
894    ///
895    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
896    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
897    /// vec.sort_unstable_by_key(|k| k.abs());
898    /// // Note: unstable sort may reorder equal elements, so we just check it's sorted by abs value
899    /// let result: Vec<i32> = vec.iter().copied().collect();
900    /// for i in 1..result.len() {
901    ///     assert!(result[i-1].abs() <= result[i].abs());
902    /// }
903    /// ```
904    #[inline]
905    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
906    where
907        F: FnMut(&T) -> K,
908        K: Ord,
909    {
910        self.as_mut_slice().sort_unstable_by_key(f);
911    }
912
913    /// Checks if the elements of this vector are sorted.
914    pub fn is_sorted(&self) -> bool
915    where
916        T: PartialOrd,
917    {
918        self.as_slice().is_sorted()
919    }
920
921    /// Checks if the elements of this vector are sorted using the given comparator function.
922    pub fn is_sorted_by<F>(&self, compare: F) -> bool
923    where
924        F: FnMut(&T, &T) -> bool,
925    {
926        self.as_slice().is_sorted_by(compare)
927    }
928
    /// Checks if the elements of this vector are sorted using the given key extraction function.
    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        // Delegate to the read-only slice view.
        self.as_slice().is_sorted_by_key(f)
    }
937
    /// Returns the index of the partition point according to the given predicate
    /// (the index of the first element for which the predicate returns `false`).
    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        // Delegate the binary search to the slice view.
        self.as_slice().partition_point(pred)
    }
945
    /// Rotates the vector in-place such that the first `mid` elements move to the end.
    pub fn rotate_left(&mut self, mid: usize) {
        // Delegate to the mutable slice view.
        self.as_mut_slice().rotate_left(mid);
    }
950
    /// Rotates the vector in-place such that the last `k` elements move to the front.
    pub fn rotate_right(&mut self, k: usize) {
        // Delegate to the mutable slice view.
        self.as_mut_slice().rotate_right(k);
    }
955
    /// Creates a new `SegmentedVec` with at least the specified capacity.
    ///
    /// The actual capacity may be larger: segments come in power-of-two sizes.
    pub fn with_capacity(capacity: usize) -> Self {
        let mut vec = Self::new();
        vec.reserve(capacity);
        vec
    }
962
963    /// Inserts an element at position `index`, shifting all elements after it to the right.
964    ///
965    /// # Panics
966    ///
967    /// Panics if `index > len`.
968    pub fn insert(&mut self, index: usize, element: T) {
969        assert!(index <= self.len);
970        self.push(element);
971        // Rotate the new element into place
972        if index < self.len - 1 {
973            for i in (index..self.len - 1).rev() {
974                unsafe {
975                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
976                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
977                    std::ptr::swap(ptr_a, ptr_b);
978                }
979            }
980        }
981    }
982
983    /// Removes and returns the element at position `index`, shifting all elements after it to the left.
984    ///
985    /// # Panics
986    ///
987    /// Panics if `index >= len`.
988    pub fn remove(&mut self, index: usize) -> T {
989        assert!(index < self.len);
990        // Shift elements left
991        for i in index..self.len - 1 {
992            unsafe {
993                let ptr_a = self.unchecked_at_mut(i) as *mut T;
994                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
995                std::ptr::swap(ptr_a, ptr_b);
996            }
997        }
998        self.pop().unwrap()
999    }
1000
1001    /// Removes an element from the vector and returns it.
1002    ///
1003    /// The removed element is replaced by the last element of the vector.
1004    /// This does not preserve ordering, but is O(1).
1005    ///
1006    /// # Panics
1007    ///
1008    /// Panics if `index >= len`.
1009    pub fn swap_remove(&mut self, index: usize) -> T {
1010        assert!(index < self.len);
1011        let last_index = self.len - 1;
1012        if index != last_index {
1013            unsafe {
1014                let ptr_a = self.unchecked_at_mut(index) as *mut T;
1015                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
1016                std::ptr::swap(ptr_a, ptr_b);
1017            }
1018        }
1019        self.pop().unwrap()
1020    }
1021
1022    /// Retains only the elements specified by the predicate.
1023    ///
1024    /// Removes all elements for which `f(&element)` returns `false`.
1025    pub fn retain<F>(&mut self, mut f: F)
1026    where
1027        F: FnMut(&T) -> bool,
1028    {
1029        let mut i = 0;
1030        while i < self.len {
1031            if f(unsafe { self.unchecked_at(i) }) {
1032                i += 1;
1033            } else {
1034                self.remove(i);
1035            }
1036        }
1037    }
1038
1039    /// Retains only the elements specified by the predicate, passing a mutable reference.
1040    pub fn retain_mut<F>(&mut self, mut f: F)
1041    where
1042        F: FnMut(&mut T) -> bool,
1043    {
1044        let mut i = 0;
1045        while i < self.len {
1046            if f(unsafe { self.unchecked_at_mut(i) }) {
1047                i += 1;
1048            } else {
1049                self.remove(i);
1050            }
1051        }
1052    }
1053
    /// Removes consecutive duplicate elements.
    ///
    /// If the vector is sorted, this removes all duplicates.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        // Equality-based special case of `dedup_by`.
        self.dedup_by(|a, b| a == b);
    }
1063
1064    /// Removes consecutive elements that satisfy the given equality relation.
1065    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1066    where
1067        F: FnMut(&mut T, &mut T) -> bool,
1068    {
1069        if self.len <= 1 {
1070            return;
1071        }
1072        let mut write = 1;
1073        for read in 1..self.len {
1074            let should_keep = unsafe {
1075                let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1076                let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1077                !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1078            };
1079            if should_keep {
1080                if read != write {
1081                    unsafe {
1082                        let ptr_src = self.unchecked_at_mut(read) as *mut T;
1083                        let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1084                        std::ptr::swap(ptr_dst, ptr_src);
1085                    }
1086                }
1087                write += 1;
1088            } else {
1089                // Drop the duplicate
1090                unsafe {
1091                    std::ptr::drop_in_place(self.unchecked_at_mut(read));
1092                }
1093            }
1094        }
1095        self.len = write;
1096    }
1097
    /// Removes consecutive elements that map to the same key.
    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        // Key-equality special case of `dedup_by`.
        self.dedup_by(|a, b| key(a) == key(b));
    }
1106
1107    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1108    ///
1109    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1110    /// with each additional slot filled with `value`.
1111    /// If `new_len` is less than `len`, the vector is simply truncated.
1112    pub fn resize(&mut self, new_len: usize, value: T)
1113    where
1114        T: Clone,
1115    {
1116        if new_len > self.len {
1117            self.reserve(new_len - self.len);
1118            while self.len < new_len {
1119                self.push(value.clone());
1120            }
1121        } else {
1122            self.truncate(new_len);
1123        }
1124    }
1125
1126    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1127    ///
1128    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1129    /// with each additional slot filled with the result of calling the closure `f`.
1130    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
1131    where
1132        F: FnMut() -> T,
1133    {
1134        if new_len > self.len {
1135            self.reserve(new_len - self.len);
1136            while self.len < new_len {
1137                self.push(f());
1138            }
1139        } else {
1140            self.truncate(new_len);
1141        }
1142    }
1143
1144    /// Moves all the elements of `other` into `self`, leaving `other` empty.
1145    pub fn append(&mut self, other: &mut Self) {
1146        let other_len = other.len;
1147        self.reserve(other_len);
1148        let start = self.len;
1149        while let Some(item) = other.pop() {
1150            self.push(item);
1151        }
1152        // Reverse the appended portion since pop() returns in reverse order
1153        let mut left = start;
1154        let mut right = self.len;
1155        while left < right {
1156            right -= 1;
1157            if left < right {
1158                unsafe {
1159                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
1160                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
1161                    std::ptr::swap(ptr_a, ptr_b);
1162                }
1163                left += 1;
1164            }
1165        }
1166    }
1167
1168    /// Splits the vector into two at the given index.
1169    ///
1170    /// Returns a newly allocated vector containing the elements in the range `[at, len)`.
1171    /// After the call, the original vector will contain elements `[0, at)`.
1172    ///
1173    /// # Panics
1174    ///
1175    /// Panics if `at > len`.
1176    pub fn split_off(&mut self, at: usize) -> Self {
1177        assert!(at <= self.len);
1178        let mut other = Self::new();
1179        other.reserve(self.len - at);
1180        for i in at..self.len {
1181            other.push(unsafe { self.unchecked_read(i) });
1182        }
1183        self.len = at;
1184        other
1185    }
1186
    /// Returns an iterator over `chunk_size` elements of the vector at a time.
    ///
    /// # Panics
    ///
    /// Panics if `chunk_size` is 0.
    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T, PREALLOC> {
        // Delegate to the read-only slice view.
        self.as_slice().chunks(chunk_size)
    }
1195
    /// Returns an iterator over all contiguous windows of length `size`.
    ///
    /// # Panics
    ///
    /// Panics if `size` is 0.
    pub fn windows(&self, size: usize) -> Windows<'_, T, PREALLOC> {
        // Delegate to the read-only slice view.
        self.as_slice().windows(size)
    }
1204
    /// Returns an iterator over `chunk_size` elements at a time, starting from the end.
    ///
    /// # Panics
    ///
    /// Panics if `chunk_size` is 0.
    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T, PREALLOC> {
        // Delegate to the read-only slice view.
        self.as_slice().rchunks(chunk_size)
    }
1213
    /// Returns an iterator over exactly `chunk_size` elements at a time.
    ///
    /// # Panics
    ///
    /// Panics if `chunk_size` is 0.
    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T, PREALLOC> {
        // Delegate to the read-only slice view.
        self.as_slice().chunks_exact(chunk_size)
    }
1222
    /// Creates a draining iterator that removes the specified range and yields the removed items.
    ///
    /// # Panics
    ///
    /// Panics if the starting point is greater than the end point or if the end point is greater than the length.
    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T, PREALLOC> {
        // Validate the range up front so the iterator can use unchecked access.
        assert!(range.start <= range.end && range.end <= self.len);
        Drain {
            vec: self,
            range_start: range.start,
            range_end: range.end,
            index: range.start, // next element to yield from the front
        }
    }
1237
    /// Copies the elements to a new `Vec<T>` (elements are cloned, not moved).
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        // Delegate to the read-only slice view.
        self.as_slice().to_vec()
    }
1245
1246    // --- Internal helper methods ---
1247
    /// Calculate the number of shelves needed for a given capacity.
    ///
    /// A "shelf" is one dynamically allocated segment; shelf sizes grow as
    /// powers of two (see `shelf_size`), so the count follows from the position
    /// of the highest set bit of the biased capacity.
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        if PREALLOC == 0 {
            // Bias by the first shelf's size so indices start at shelf 0.
            let val = box_count + Self::MIN_NON_ZERO_CAP;
            val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
        } else {
            // The inline prealloc acts as an implicit first "shelf", hence the
            // extra `- 1` (PREALLOC_EXP is presumably log2(PREALLOC) — declared
            // off-screen; PREALLOC must be 0 or a power of two per the type docs).
            let val = box_count + PREALLOC;
            val.next_power_of_two().trailing_zeros() - Self::PREALLOC_EXP - 1
        }
    }
1262
    /// Calculate the size of a shelf at a given index.
    ///
    /// Shelves double in size: shelf 0 holds `MIN_NON_ZERO_CAP` elements when
    /// there is no preallocation, or `2 * PREALLOC` elements otherwise.
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        if PREALLOC == 0 {
            Self::MIN_NON_ZERO_CAP << shelf_index
        } else {
            // 2^(PREALLOC_EXP + 1) == 2 * PREALLOC for power-of-two PREALLOC.
            1usize << (shelf_index + Self::PREALLOC_EXP + 1)
        }
    }
1272
    /// Calculate which shelf and box index a list index falls into.
    /// Returns (shelf_index, box_index).
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        // Bias the index so shelf boundaries align with powers of two; the MSB
        // position of the biased index then identifies the shelf directly.
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        // Clear the most significant bit to get box_index
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }
1284
    /// Calculate shelf, box index, and remaining capacity in one go.
    /// Returns (shelf_index, box_index, segment_remaining).
    ///
    /// Same addressing as `location`, plus how many slots remain in that
    /// segment starting at the given index (used by the push fast path).
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        // segment_remaining = shelf_size - box_idx = (1 << msb) - box_idx
        //                   = (1 << msb) - (biased - (1 << msb))
        //                   = (2 << msb) - biased
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }
1299
    /// Get an unchecked reference to an element.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        unsafe {
            if index < PREALLOC {
                // Element lives in the inline preallocated segment.
                &(*self.prealloc_segment.as_ptr())[index]
            } else {
                // Element lives in a dynamic shelf; translate the flat index.
                let (shelf, box_idx) = Self::location(index);
                &*self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }
1320
    /// Get an unchecked mutable reference to an element.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        unsafe {
            if index < PREALLOC {
                // Element lives in the inline preallocated segment.
                &mut (*self.prealloc_segment.as_mut_ptr())[index]
            } else {
                // Element lives in a dynamic shelf; translate the flat index.
                let (shelf, box_idx) = Self::location(index);
                &mut *self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }
1341
    /// Read (move) a value out of an index, leaving the slot logically dead.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`, and the value must not be read
    /// again (a second read would duplicate ownership and double-drop).
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        unsafe {
            if index < PREALLOC {
                std::ptr::read(&(*self.prealloc_segment.as_ptr())[index])
            } else {
                let (shelf, box_idx) = Self::location(index);
                std::ptr::read(
                    self.dynamic_segments
                        .get_unchecked(shelf)
                        .as_ptr()
                        .add(box_idx),
                )
            }
        }
    }
1363
1364    /// Grow capacity to accommodate at least `new_capacity` elements.
1365    fn grow_capacity(&mut self, new_capacity: usize) {
1366        let new_shelf_count = Self::shelf_count(new_capacity);
1367        let old_shelf_count = self.dynamic_segments.len() as u32;
1368
1369        if new_shelf_count > old_shelf_count {
1370            self.dynamic_segments
1371                .reserve((new_shelf_count - old_shelf_count) as usize);
1372
1373            for i in old_shelf_count..new_shelf_count {
1374                let size = Self::shelf_size(i);
1375                let layout = Layout::array::<T>(size).expect("Layout overflow");
1376                let ptr = if layout.size() == 0 {
1377                    NonNull::dangling()
1378                } else {
1379                    let ptr = unsafe { alloc::alloc(layout) };
1380                    NonNull::new(ptr as *mut T).expect("Allocation failed")
1381                };
1382                self.dynamic_segments.push(ptr);
1383            }
1384        }
1385    }
1386
    /// Compute total capacity given the number of dynamic shelves.
    ///
    /// Inverse of `shelf_count`: sums the inline prealloc plus the geometric
    /// series of shelf sizes.
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        if shelf_count == 0 {
            PREALLOC
        } else if PREALLOC == 0 {
            // Sum of MIN_NON_ZERO_CAP * (2^0 + ... + 2^(n-1)).
            (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
        } else {
            // Sum of PREALLOC + 2*PREALLOC + ... collapses to a power of two minus PREALLOC.
            (1usize << (Self::PREALLOC_EXP + 1 + shelf_count)) - PREALLOC
        }
    }
1398
    /// Shrink capacity to the minimum needed for `new_capacity` elements.
    ///
    /// NOTE(review): `free_shelves` only deallocates storage, so callers are
    /// expected to have already dropped/moved any elements beyond `new_capacity`.
    fn shrink_capacity(&mut self, new_capacity: usize) {
        if new_capacity <= PREALLOC {
            // Everything fits inline: free all dynamic segments.
            self.free_shelves(self.dynamic_segments.len() as u32, 0);
            self.dynamic_segments.clear();
            self.dynamic_segments.shrink_to_fit();
            return;
        }

        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.dynamic_segments.len() as u32;

        if new_shelf_count < old_shelf_count {
            // Free only the trailing, now-unneeded shelves.
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.dynamic_segments.truncate(new_shelf_count as usize);
            self.dynamic_segments.shrink_to_fit();
        }
    }
1418
    /// Free shelves from `from_count` down to `to_count` (exclusive).
    ///
    /// Only deallocates storage — elements must already have been dropped or
    /// moved out by the caller. Zero-sized layouts were never allocated (their
    /// pointers are dangling sentinels from `grow_capacity`), so they are skipped.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            if layout.size() > 0 {
                unsafe {
                    alloc::dealloc(
                        self.dynamic_segments[i as usize].as_ptr() as *mut u8,
                        layout,
                    );
                }
            }
        }
    }
1434}
1435
impl<T, const PREALLOC: usize> Drop for SegmentedVec<T, PREALLOC> {
    fn drop(&mut self) {
        // Drop all elements first.
        self.clear();
        // Then free all dynamic segments; the inline prealloc storage is part
        // of the struct itself and needs no deallocation.
        self.free_shelves(self.dynamic_segments.len() as u32, 0);
    }
}
1444
impl<T, const PREALLOC: usize> sort::IndexedAccess<T> for SegmentedVec<T, PREALLOC> {
    /// Borrow the element at `index` (bounds checked only in debug builds).
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        unsafe { self.unchecked_at(index) }
    }

    /// Raw const pointer to the element at `index`.
    ///
    /// Duplicates the addressing of `unchecked_at` but never materializes a
    /// reference — presumably to sidestep aliasing restrictions in the sort
    /// code; NOTE(review): confirm against `sort`'s usage.
    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe { (*self.prealloc_segment.as_ptr()).as_ptr().add(index) }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Raw mutable pointer to the element at `index` (same addressing as `get_ptr`).
    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(index)
            }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Swap the elements at `a` and `b` (no-op when equal).
    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        unsafe {
            // `a != b`, so the two pointers address distinct slots.
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1501
impl<T, const PREALLOC: usize> Default for SegmentedVec<T, PREALLOC> {
    /// An empty vector, equivalent to `SegmentedVec::new()`.
    fn default() -> Self {
        Self::new()
    }
}
1507
impl<T, const PREALLOC: usize> Index<usize> for SegmentedVec<T, PREALLOC> {
    type Output = T;

    /// Panics with "index out of bounds" when `index >= len`.
    fn index(&self, index: usize) -> &Self::Output {
        self.get(index).expect("index out of bounds")
    }
}
1515
impl<T, const PREALLOC: usize> IndexMut<usize> for SegmentedVec<T, PREALLOC> {
    /// Panics with "index out of bounds" when `index >= len`.
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        self.get_mut(index).expect("index out of bounds")
    }
}
1521
impl<T: Clone, const PREALLOC: usize> Clone for SegmentedVec<T, PREALLOC> {
    /// Deep-clones the vector segment by segment.
    ///
    /// `reserve(self.len)` on the fresh vector allocates the same shelf layout
    /// as `self` (capacity is a pure function of element count), so the
    /// segment-wise copies below index into matching shelves.
    ///
    /// NOTE(review): if `T::clone` panics mid-segment, elements written in the
    /// current batch are leaked (len is only bumped after each batch) — a leak,
    /// not a double-free.
    fn clone(&self) -> Self {
        if self.len == 0 {
            return Self::new();
        }

        let mut new_vec = Self::new();
        new_vec.reserve(self.len);

        // Clone the inline prealloc segment (up to `len` elements).
        if PREALLOC > 0 {
            let count = self.len.min(PREALLOC);
            let src = unsafe { (*self.prealloc_segment.as_ptr()).as_ptr() };
            let dst = unsafe { (*new_vec.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            for i in 0..count {
                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
            }
            new_vec.len = count;
        }

        // Clone the dynamic segments, shelf by shelf, until `remaining` is exhausted.
        if self.len > PREALLOC {
            let mut remaining = self.len - PREALLOC;
            for shelf in 0..self.dynamic_segments.len() {
                let size = Self::shelf_size(shelf as u32);
                let count = remaining.min(size);
                let src = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                let dst = unsafe { new_vec.dynamic_segments.get_unchecked(shelf).as_ptr() };
                for i in 0..count {
                    unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
                }
                new_vec.len += count;
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        // Re-derive the cached fast-push pointer; null it out when the vector
        // is exactly full (no valid next write position).
        if new_vec.len < new_vec.capacity() {
            new_vec.init_write_ptr();
        } else {
            new_vec.write_ptr = std::ptr::null_mut();
            new_vec.segment_end = std::ptr::null_mut();
        }

        new_vec
    }
}
1572
impl<T: std::fmt::Debug, const PREALLOC: usize> std::fmt::Debug for SegmentedVec<T, PREALLOC> {
    /// Formats like a slice: `[a, b, c]`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
1578
1579impl<T: PartialEq, const PREALLOC: usize> PartialEq for SegmentedVec<T, PREALLOC> {
1580    fn eq(&self, other: &Self) -> bool {
1581        if self.len != other.len {
1582            return false;
1583        }
1584        for i in 0..self.len {
1585            if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1586                return false;
1587            }
1588        }
1589        true
1590    }
1591}
1592
// Full equivalence holds because equality is delegated element-wise to `T: Eq`.
impl<T: Eq, const PREALLOC: usize> Eq for SegmentedVec<T, PREALLOC> {}
1594
1595impl<T, const PREALLOC: usize> FromIterator<T> for SegmentedVec<T, PREALLOC> {
1596    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1597        let iter = iter.into_iter();
1598        let (lower, _) = iter.size_hint();
1599        let mut vec = Self::new();
1600        vec.reserve(lower);
1601        for item in iter {
1602            vec.push(item);
1603        }
1604        vec
1605    }
1606}
1607
1608impl<T, const PREALLOC: usize> Extend<T> for SegmentedVec<T, PREALLOC> {
1609    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1610        let iter = iter.into_iter();
1611        let (lower, _) = iter.size_hint();
1612        self.reserve(lower);
1613        for item in iter {
1614            self.push(item);
1615        }
1616    }
1617}
1618
1619// --- Iterator implementations ---
1620
/// An iterator over references to elements of a `SegmentedVec`.
///
/// Walks one segment at a time with a raw pointer so that the common case is a
/// single pointer bump, hopping segments only at boundaries.
pub struct Iter<'a, T, const PREALLOC: usize> {
    vec: &'a SegmentedVec<T, PREALLOC>,
    /// Current pointer within segment
    ptr: *const T,
    /// End of current segment (min of segment capacity and vec.len)
    segment_end: *const T,
    /// Current logical index (count of elements already yielded)
    index: usize,
    /// Next shelf to enter once the current segment is exhausted (dynamic segments only)
    shelf_index: u32,
}
1633
impl<'a, T, const PREALLOC: usize> Iterator for Iter<'a, T, PREALLOC> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment — just bump the pointer.
        if self.ptr < self.segment_end {
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        // Slow path: hop to the next segment (or finish).
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: `index` counts elements already yielded.
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1654
impl<'a, T, const PREALLOC: usize> Iter<'a, T, PREALLOC> {
    /// Slow path of `next`: advance into the next segment and yield its first
    /// element, or return `None` when the vector is exhausted.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        // Move to next segment
        if self.index < PREALLOC {
            // In prealloc segment
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_ptr())
                    .as_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            // Stop at `len` if the vector ends inside this segment.
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // In dynamic segments
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            // Stop at `len` if the vector ends inside this shelf.
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the fresh segment.
        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}
1691
// `size_hint` is exact (lower == upper), so the exact-size contract holds.
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Iter<'a, T, PREALLOC> {}
1693
/// An iterator over mutable references to elements of a `SegmentedVec`.
///
/// Mirrors `Iter`: segment-wise raw-pointer traversal with a fast pointer-bump
/// path inside each segment.
pub struct IterMut<'a, T, const PREALLOC: usize> {
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    /// Current pointer within segment
    ptr: *mut T,
    /// End of current segment (min of segment capacity and vec.len)
    segment_end: *mut T,
    /// Current logical index (count of elements already yielded)
    index: usize,
    /// Next shelf to enter once the current segment is exhausted (dynamic segments only)
    shelf_index: u32,
}
1706
impl<'a, T, const PREALLOC: usize> Iterator for IterMut<'a, T, PREALLOC> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment — just bump the pointer.
        // Yielding `&mut *result` from a raw pointer decouples the returned
        // lifetime from `&mut self`; each slot is yielded at most once.
        if self.ptr < self.segment_end {
            let result = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(unsafe { &mut *result });
        }
        // Slow path: hop to the next segment (or finish).
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: `index` counts elements already yielded.
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1727
impl<'a, T, const PREALLOC: usize> IterMut<'a, T, PREALLOC> {
    /// Slow path of `next`: advance into the next segment and yield its first
    /// element, or return `None` when the vector is exhausted.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        // Move to next segment
        if self.index < PREALLOC {
            // In prealloc segment
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.index)
            };
            let remaining = PREALLOC - self.index;
            // Stop at `len` if the vector ends inside this segment.
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // In dynamic segments
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            // as_ptr() returns *mut T since NonNull<T> stores *mut T
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            // Stop at `len` if the vector ends inside this shelf.
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the fresh segment.
        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}
1765
// `size_hint` is exact (lower == upper), so the exact-size contract holds.
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for IterMut<'a, T, PREALLOC> {}
1767
impl<T, const PREALLOC: usize> IntoIterator for SegmentedVec<T, PREALLOC> {
    type Item = T;
    type IntoIter = IntoIter<T, PREALLOC>;

    /// Consumes the vector into an owning iterator over its elements.
    fn into_iter(self) -> Self::IntoIter {
        IntoIter {
            vec: self,
            index: 0,
        }
    }
}
1779
impl<'a, T, const PREALLOC: usize> IntoIterator for &'a SegmentedVec<T, PREALLOC> {
    type Item = &'a T;
    type IntoIter = Iter<'a, T, PREALLOC>;

    /// Enables `for x in &vec` (borrowing iteration).
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
1788
impl<'a, T, const PREALLOC: usize> IntoIterator for &'a mut SegmentedVec<T, PREALLOC> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a, T, PREALLOC>;

    /// Enables `for x in &mut vec` (mutably borrowing iteration).
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}
1797
/// An owning iterator over elements of a `SegmentedVec`.
pub struct IntoIter<T, const PREALLOC: usize> {
    vec: SegmentedVec<T, PREALLOC>,
    /// Next index to move out; everything before it has already been yielded.
    index: usize,
}
1803
1804impl<T, const PREALLOC: usize> Iterator for IntoIter<T, PREALLOC> {
1805    type Item = T;
1806
1807    fn next(&mut self) -> Option<Self::Item> {
1808        if self.index >= self.vec.len {
1809            return None;
1810        }
1811        let value = unsafe { self.vec.unchecked_read(self.index) };
1812        self.index += 1;
1813        Some(value)
1814    }
1815
1816    fn size_hint(&self) -> (usize, Option<usize>) {
1817        let remaining = self.vec.len - self.index;
1818        (remaining, Some(remaining))
1819    }
1820}
1821
// `size_hint` is exact (lower == upper), so the exact-size contract holds.
impl<T, const PREALLOC: usize> ExactSizeIterator for IntoIter<T, PREALLOC> {}
1823
impl<T, const PREALLOC: usize> Drop for IntoIter<T, PREALLOC> {
    fn drop(&mut self) {
        // Drop the elements that were never yielded (index..len); elements
        // before `index` were moved out to the caller already.
        for i in self.index..self.vec.len {
            unsafe {
                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
            }
        }
        // Zero the length so the inner SegmentedVec's own Drop doesn't drop
        // elements a second time (it still frees the segment allocations).
        self.vec.len = 0;
    }
}
1836
/// A draining iterator for `SegmentedVec`.
///
/// This struct is created by the `drain` method on `SegmentedVec`.
///
/// Yields the elements of the drained range by value; the `Drop` impl
/// destroys any unyielded elements and shifts the trailing elements down
/// to close the gap.
pub struct Drain<'a, T, const PREALLOC: usize> {
    /// The vector being drained; its `len` is fixed up in `Drop`.
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    /// First index of the drained range (never changes during iteration).
    range_start: usize,
    /// Exclusive end of the unconsumed part of the range. `Drop` assumes
    /// that the elements to keep begin exactly at this index.
    /// NOTE(review): `next_back` shrinks this field — confirm that the
    /// tail-starts-at-`range_end` invariant is re-established there.
    range_end: usize,
    /// Front cursor: the next index `next` will read.
    index: usize,
}
1846
1847impl<'a, T, const PREALLOC: usize> Iterator for Drain<'a, T, PREALLOC> {
1848    type Item = T;
1849
1850    fn next(&mut self) -> Option<Self::Item> {
1851        if self.index >= self.range_end {
1852            None
1853        } else {
1854            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1855            self.index += 1;
1856            Some(value)
1857        }
1858    }
1859
1860    fn size_hint(&self) -> (usize, Option<usize>) {
1861        let remaining = self.range_end - self.index;
1862        (remaining, Some(remaining))
1863    }
1864}
1865
1866impl<'a, T, const PREALLOC: usize> DoubleEndedIterator for Drain<'a, T, PREALLOC> {
1867    fn next_back(&mut self) -> Option<Self::Item> {
1868        if self.index >= self.range_end {
1869            None
1870        } else {
1871            self.range_end -= 1;
1872            Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1873        }
1874    }
1875}
1876
// Marker impl: sound because `Drain::size_hint` returns exact bounds.
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Drain<'a, T, PREALLOC> {}
1878
1879impl<'a, T, const PREALLOC: usize> Drop for Drain<'a, T, PREALLOC> {
1880    fn drop(&mut self) {
1881        // Drop any remaining elements in the range
1882        for i in self.index..self.range_end {
1883            unsafe {
1884                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1885            }
1886        }
1887
1888        // Shift elements after the range to fill the gap
1889        let original_range_end = self.range_end;
1890        let original_len = self.vec.len;
1891        let drain_count = original_range_end - self.range_start;
1892
1893        for i in 0..(original_len - original_range_end) {
1894            unsafe {
1895                let src = self.vec.unchecked_at(original_range_end + i) as *const T;
1896                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
1897                std::ptr::copy_nonoverlapping(src, dst, 1);
1898            }
1899        }
1900
1901        self.vec.len = original_len - drain_count;
1902    }
1903}
1904
// Unit tests. Grouped roughly as: basic container operations, iteration,
// drop behavior, sorting, slices, and segment-capacity edge cases.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    // Core guarantee of the crate: growing never moves existing elements.
    #[test]
    fn test_stable_pointers() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        // Push many more elements to trigger segment allocations
        for i in 2..1000 {
            vec.push(i);
        }

        // The pointer should still be valid
        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32, 4> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        assert_eq!(vec.pop(), Some(99));
        assert_eq!(vec.len(), 99);
    }

    #[test]
    fn test_various_prealloc_sizes() {
        // Exercises every supported inline-capacity shape (0 and powers of 2).
        fn test_prealloc<const N: usize>() {
            let mut vec: SegmentedVec<i32, N> = SegmentedVec::new();
            for i in 0..100 {
                vec.push(i);
            }
            for i in 0..100 {
                assert_eq!(vec[i], i as i32);
            }
        }

        test_prealloc::<0>();
        test_prealloc::<1>();
        test_prealloc::<2>();
        test_prealloc::<4>();
        test_prealloc::<8>();
        test_prealloc::<16>();
    }

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    // Verifies every stored element is dropped exactly once when the
    // vector itself is dropped.
    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter, 4> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    // --- Sort tests ---

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_by(|a, b| b.cmp(a)); // reverse order
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    #[test]
    fn test_sort_stable() {
        // Test that sort is stable (equal elements maintain their relative order)
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item, 4> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        // All items with key=1 should come first, in original order
        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        // Then items with key=2, in original order
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        // Just verify it's sorted by absolute value (unstable may reorder equal elements)
        let result: Vec<i32> = vec.iter().copied().collect();
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        // Add numbers in reverse order
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        // Add numbers in reverse order
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_with_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        // Verify that after sorting, pointers to elements are still valid
        // (they point to different values, but the memory locations are stable)
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        // Get pointer to first element before sort
        let ptr = &vec[0] as *const i32;

        vec.sort();

        // After sort, the pointer still points to valid memory (now contains sorted value)
        assert_eq!(unsafe { *ptr }, 1); // First element after sort is 1
    }

    // --- Slice tests ---

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        // Sort only the middle part
        {
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    // --- Capacity / segment-growth tests ---

    #[test]
    fn test_min_non_zero_cap() {
        // For u8 (1 byte), first segment should be 8 elements
        let mut vec_u8: SegmentedVec<u8, 0> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        // For i32 (4 bytes, <= 1024), first segment should be 4 elements
        let mut vec_i32: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        // Verify indexing still works correctly with the larger first segment
        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        // Test with PREALLOC=0
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Test with PREALLOC=4, starting empty
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Test with PREALLOC=4, starting with some elements
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        // Test empty slice
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        // Test extend to non-boundary, then push (regression test)
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]); // 3 elements, not at boundary
        assert_eq!(vec.len(), 3);
        vec.push(4); // Should use fast path, not crash in push_slow
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }

        // Test extend to exact capacity boundary, then push
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3, 4]); // Fills prealloc exactly
        assert_eq!(vec.len(), 4);
        vec.push(5); // Should correctly allocate new segment
        assert_eq!(vec.len(), 5);
        assert_eq!(vec[4], 5);
    }
}