segmented_vec/
lib.rs

1//! A segmented growable vector with stable pointers.
2//!
3//! Unlike `Vec`, pushing new elements never invalidates pointers to existing elements.
4//! This is achieved by storing elements in segments of exponentially growing sizes.
5//!
6//! # Example
7//!
8//! ```
9//! use segmented_vec::SegmentedVec;
10//!
11//! let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
12//! vec.push(1);
13//! vec.push(2);
14//!
15//! // Get a pointer to the first element
16//! let ptr = &vec[0] as *const i32;
17//!
18//! // Push more elements - the pointer remains valid!
19//! for i in 3..100 {
20//!     vec.push(i);
21//! }
22//!
23//! // The pointer is still valid
24//! assert_eq!(unsafe { *ptr }, 1);
25//! ```
26
27mod slice;
28mod sort;
29
30pub use slice::{
31    Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32    Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::mem::MaybeUninit;
39use std::ops::{Index, IndexMut};
40use std::ptr::NonNull;
41
/// A segmented vector with stable pointers.
///
/// `PREALLOC` specifies the number of elements to store inline (must be 0 or a power of 2).
/// Elements beyond the preallocation are stored in dynamically allocated segments.
pub struct SegmentedVec<T, const PREALLOC: usize = 0> {
    /// Inline storage for the first `PREALLOC` elements; only the first
    /// `len.min(PREALLOC)` slots are initialized.
    prealloc_segment: MaybeUninit<[T; PREALLOC]>,
    /// Heap-allocated segments ("shelves") of exponentially growing sizes.
    dynamic_segments: Vec<NonNull<T>>,
    /// Total number of initialized elements across all segments.
    len: usize,
    /// Cached pointer to the next write position (for fast push)
    write_ptr: *mut T,
    /// Pointer to the end of the current segment
    segment_end: *mut T,
    /// Pointer to the base of the current segment
    segment_base: *mut T,
    /// Declares logical ownership of `T` (drop check / variance), since the
    /// raw pointers above do not.
    _marker: PhantomData<T>,
}
58
// SAFETY: SegmentedVec owns its elements (inline or through segment pointers
// it allocated itself), so transferring the whole vector to another thread is
// sound whenever T itself is Send.
unsafe impl<T: Send, const PREALLOC: usize> Send for SegmentedVec<T, PREALLOC> {}
// SAFETY: a shared reference only permits reads of the elements, so sharing
// the vector across threads is sound whenever T itself is Sync.
unsafe impl<T: Sync, const PREALLOC: usize> Sync for SegmentedVec<T, PREALLOC> {}
63
64impl<T, const PREALLOC: usize> SegmentedVec<T, PREALLOC> {
    /// log2(PREALLOC), or 0 when there is no inline storage.
    ///
    /// The `assert!` turns a non-power-of-two `PREALLOC` into a
    /// compile-time error (const evaluation of this item panics).
    const PREALLOC_EXP: u32 = if PREALLOC == 0 {
        0
    } else {
        assert!(
            PREALLOC.is_power_of_two(),
            "PREALLOC must be 0 or a power of 2"
        );
        PREALLOC.trailing_zeros()
    };

    /// Minimum capacity for the first dynamic segment when PREALLOC=0.
    /// Avoids tiny allocations that heap allocators round up anyway.
    /// - 8 for 1-byte elements (allocators round up small requests)
    /// - 4 for moderate elements (<= 1 KiB)
    /// - 1 for large elements (avoid wasting space)
    const MIN_NON_ZERO_CAP: usize = {
        let size = std::mem::size_of::<T>();
        if size == 1 {
            8
        } else if size <= 1024 {
            4
        } else {
            1
        }
    };

    /// log2(MIN_NON_ZERO_CAP), used when PREALLOC == 0.
    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();

    /// Bias added to an element index so that segment boundaries fall on
    /// powers of two (equals the size of the first segment).
    const BIAS: usize = if PREALLOC > 0 {
        PREALLOC
    } else {
        Self::MIN_NON_ZERO_CAP
    };

    /// Offset to subtract from MSB to get shelf index.
    const SHELF_OFFSET: u32 = if PREALLOC == 0 {
        Self::MIN_CAP_EXP
    } else {
        Self::PREALLOC_EXP + 1
    };
105
106    /// Creates a new empty `SegmentedVec`.
107    ///
108    /// # Example
109    ///
110    /// ```
111    /// use segmented_vec::SegmentedVec;
112    /// let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
113    /// assert!(vec.is_empty());
114    /// ```
115    #[inline]
116    pub const fn new() -> Self {
117        Self {
118            prealloc_segment: MaybeUninit::uninit(),
119            dynamic_segments: Vec::new(),
120            len: 0,
121            write_ptr: std::ptr::null_mut(),
122            segment_end: std::ptr::null_mut(),
123            segment_base: std::ptr::null_mut(),
124            _marker: PhantomData,
125        }
126    }
127
    /// Initialize or update the write pointer cache from `self.len`.
    ///
    /// Postcondition: `write_ptr` addresses the slot of index `self.len`
    /// (the next slot to be written) and `segment_base`/`segment_end`
    /// bracket the segment containing that slot. All three pointers are
    /// null only when the vector has no storage at all.
    #[inline]
    fn init_write_ptr(&mut self) {
        if PREALLOC > 0 && self.len < PREALLOC {
            // Slot `len` is inside the inline prealloc segment.
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_base = base;
            self.write_ptr = unsafe { base.add(self.len) };
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else if !self.dynamic_segments.is_empty() {
            // Slot `len` is inside a dynamic segment; locate it.
            let (shelf, box_idx) = Self::location(self.len);
            let shelf_size = Self::shelf_size(shelf as u32);
            let base = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
            self.segment_base = base;
            self.write_ptr = unsafe { base.add(box_idx) };
            self.segment_end = unsafe { base.add(shelf_size) };
        } else if PREALLOC > 0 {
            // NOTE(review): this branch implies len >= PREALLOC with no
            // dynamic segments allocated, yet it parks write_ptr at the
            // *base* of the prealloc segment. It appears unreachable from
            // the visible callers (they only call with len < PREALLOC or
            // with dynamic segments present) — confirm before relying on it.
            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            self.segment_base = base;
            self.write_ptr = base;
            self.segment_end = unsafe { base.add(PREALLOC) };
        } else {
            // No storage at all: null cache routes push/pop to slow paths.
            self.segment_base = std::ptr::null_mut();
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
154
    /// Returns the number of elements in the vector.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector contains no elements.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Returns the current capacity of the vector.
    ///
    /// NOTE(review): derived solely from the number of dynamic segments via
    /// `compute_capacity`; confirm that helper also accounts for the inline
    /// `PREALLOC` slots.
    #[inline]
    pub fn capacity(&self) -> usize {
        Self::compute_capacity(self.dynamic_segments.len() as u32)
    }
172
173    /// Appends an element to the back of the vector.
174    ///
175    /// # Panics
176    ///
177    /// Panics if allocation fails.
178    ///
179    /// # Example
180    ///
181    /// ```
182    /// use segmented_vec::SegmentedVec;
183    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
184    /// vec.push(1);
185    /// vec.push(2);
186    /// assert_eq!(vec.len(), 2);
187    /// ```
188    #[inline]
189    pub fn push(&mut self, value: T) {
190        // Fast path: we have space in the current segment
191        if self.write_ptr < self.segment_end {
192            unsafe {
193                std::ptr::write(self.write_ptr, value);
194                self.write_ptr = self.write_ptr.add(1);
195            }
196            self.len += 1;
197            return;
198        }
199
200        // Slow path: need to grow or move to next segment
201        self.push_slow(value);
202    }
203
    /// Slow path for `push`: the cached segment is full (or the cache is
    /// null), so the element begins a new segment.
    ///
    /// Relies on the invariant that the slow path is only reached when
    /// `self.len` sits on a segment boundary.
    #[cold]
    #[inline(never)]
    fn push_slow(&mut self, value: T) {
        let new_len = self.len + 1;

        // Special case: first push into prealloc segment
        if PREALLOC > 0 && self.len == 0 {
            unsafe {
                // SAFETY: the inline array provides storage for PREALLOC
                // elements and slot 0 is unused while len == 0.
                let base = (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr();
                std::ptr::write(base, value);
                self.segment_base = base;
                self.write_ptr = base.add(1);
                self.segment_end = base.add(PREALLOC);
            }
            self.len = 1;
            return;
        }

        // We're at a segment boundary (box_index = 0), so biased = len + BIAS is a power of 2
        // Calculate shelf directly without full location()
        let biased = self.len + Self::BIAS;
        let shelf = (biased.trailing_zeros() - Self::SHELF_OFFSET) as usize;
        let shelf_size = Self::shelf_size(shelf as u32);

        let old_segments_len = self.dynamic_segments.len();

        // Allocate the next shelf on demand, or reuse one retained by an
        // earlier clear()/truncate().
        let base = if shelf >= old_segments_len {
            self.grow_once();
            unsafe { self.dynamic_segments.last().unwrap_unchecked().as_ptr() }
        } else {
            unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() }
        };

        unsafe {
            // SAFETY: `base` is the start of a segment of `shelf_size`
            // allocated slots; its first slot is unused (len is at the
            // segment boundary).
            std::ptr::write(base, value);
            self.segment_base = base;
            self.write_ptr = base.add(1);
            self.segment_end = base.add(shelf_size);
        }
        self.len = new_len;
    }
245
    /// Removes the last element from the vector and returns it, or `None` if empty.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.push(1);
    /// vec.push(2);
    /// assert_eq!(vec.pop(), Some(2));
    /// assert_eq!(vec.pop(), Some(1));
    /// assert_eq!(vec.pop(), None);
    /// ```
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        // Fast path: the cached segment still has elements.
        if self.write_ptr > self.segment_base {
            unsafe {
                let new_len = self.len - 1;
                self.len = new_len;
                // Move back to the last written element
                self.write_ptr = self.write_ptr.sub(1);
                // Return the value
                // SAFETY: write_ptr now addresses the last initialized
                // element of the vector; reading it moves ownership out.
                Some(std::ptr::read(self.write_ptr))
            }
        } else {
            // Slow path: current segment doesn't have any element left, cross the boundary
            self.pop_slow_path()
        }
    }
276
277    #[cold]
278    #[inline(never)]
279    fn pop_slow_path(&mut self) -> Option<T> {
280        if self.len == 0 {
281            return None;
282        }
283
284        unsafe {
285            // Because write_ptr == segment_base, the element we want
286            // is actually at the very first slot of this segment.
287            let val = std::ptr::read(self.write_ptr);
288
289            let new_len = self.len - 1;
290            self.len = new_len;
291            self.init_write_ptr();
292
293            Some(val)
294        }
295    }
296
297    /// Returns a reference to the element at the given index.
298    ///
299    /// Returns `None` if the index is out of bounds.
300    #[inline]
301    pub fn get(&self, index: usize) -> Option<&T> {
302        if index < self.len {
303            Some(unsafe { self.unchecked_at(index) })
304        } else {
305            None
306        }
307    }
308
309    /// Returns a mutable reference to the element at the given index.
310    ///
311    /// Returns `None` if the index is out of bounds.
312    #[inline]
313    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
314        if index < self.len {
315            Some(unsafe { self.unchecked_at_mut(index) })
316        } else {
317            None
318        }
319    }
320
321    /// Returns a reference to the first element, or `None` if empty.
322    #[inline]
323    pub fn first(&self) -> Option<&T> {
324        self.get(0)
325    }
326
327    /// Returns a mutable reference to the first element, or `None` if empty.
328    #[inline]
329    pub fn first_mut(&mut self) -> Option<&mut T> {
330        self.get_mut(0)
331    }
332
333    /// Returns a reference to the last element, or `None` if empty.
334    #[inline]
335    pub fn last(&self) -> Option<&T> {
336        if self.len == 0 {
337            None
338        } else {
339            self.get(self.len - 1)
340        }
341    }
342
343    /// Returns a mutable reference to the last element, or `None` if empty.
344    #[inline]
345    pub fn last_mut(&mut self) -> Option<&mut T> {
346        if self.len == 0 {
347            None
348        } else {
349            self.get_mut(self.len - 1)
350        }
351    }
352
353    /// Returns `true` if the slice contains an element with the given value.
354    #[inline]
355    pub fn contains(&self, x: &T) -> bool
356    where
357        T: PartialEq,
358    {
359        self.as_slice().contains(x)
360    }
361
    /// Returns `true` if `needle` is a prefix of the vector.
    ///
    /// Delegates to the segment-aware slice view.
    pub fn starts_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().starts_with(needle)
    }

    /// Returns `true` if `needle` is a suffix of the vector.
    ///
    /// Delegates to the segment-aware slice view.
    pub fn ends_with(&self, needle: &[T]) -> bool
    where
        T: PartialEq,
    {
        self.as_slice().ends_with(needle)
    }
377
378    /// Binary searches this vector for a given element.
379    ///
380    /// If the value is found, returns `Ok(index)`. If not found, returns
381    /// `Err(index)` where `index` is the position where the element could be inserted.
382    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
383    where
384        T: Ord,
385    {
386        self.as_slice().binary_search(x)
387    }
388
389    /// Binary searches this vector with a comparator function.
390    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
391    where
392        F: FnMut(&T) -> Ordering,
393    {
394        self.as_slice().binary_search_by(f)
395    }
396
397    /// Binary searches this vector with a key extraction function.
398    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
399    where
400        F: FnMut(&T) -> B,
401        B: Ord,
402    {
403        self.as_slice().binary_search_by_key(b, f)
404    }
405
    /// Swaps two elements in the vector.
    ///
    /// # Panics
    ///
    /// Panics if `a` or `b` are out of bounds.
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
        self.as_mut_slice().swap(a, b)
    }

    /// Reverses the order of elements in the vector, in place.
    pub fn reverse(&mut self) {
        self.as_mut_slice().reverse()
    }

    /// Fills the vector with clones of the given value.
    pub fn fill(&mut self, value: T)
    where
        T: Clone,
    {
        self.as_mut_slice().fill(value)
    }

    /// Fills the vector with values produced by repeatedly calling `f`.
    pub fn fill_with<F>(&mut self, f: F)
    where
        F: FnMut() -> T,
    {
        self.as_mut_slice().fill_with(f)
    }
436
437    /// Clears the vector, removing all elements.
438    ///
439    /// This does not deallocate the dynamic segments.
440    pub fn clear(&mut self) {
441        if self.len == 0 {
442            return;
443        }
444
445        if std::mem::needs_drop::<T>() {
446            // Drop elements in prealloc segment
447            if PREALLOC > 0 {
448                let count = self.len.min(PREALLOC);
449                let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
450                unsafe { std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count)) };
451            }
452
453            // Drop elements in dynamic segments
454            if self.len > PREALLOC {
455                let mut remaining = self.len - PREALLOC;
456                for shelf in 0..self.dynamic_segments.len() {
457                    let size = Self::shelf_size(shelf as u32);
458                    let count = remaining.min(size);
459                    let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
460                    unsafe {
461                        std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(ptr, count))
462                    };
463                    remaining -= count;
464                    if remaining == 0 {
465                        break;
466                    }
467                }
468            }
469        }
470
471        self.len = 0;
472        // Reset write pointer cache
473        if PREALLOC > 0 {
474            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
475            self.write_ptr = base;
476            self.segment_end = unsafe { base.add(PREALLOC) };
477        } else {
478            self.write_ptr = std::ptr::null_mut();
479            self.segment_end = std::ptr::null_mut();
480        }
481    }
482
483    /// Shortens the vector, keeping the first `new_len` elements.
484    ///
485    /// If `new_len` is greater than or equal to the current length, this has no effect.
486    pub fn truncate(&mut self, new_len: usize) {
487        if new_len >= self.len {
488            return;
489        }
490
491        if std::mem::needs_drop::<T>() {
492            // Drop elements in prealloc segment (from new_len to min(self.len, PREALLOC))
493            if PREALLOC > 0 && new_len < PREALLOC {
494                let start = new_len;
495                let end = self.len.min(PREALLOC);
496                if start < end {
497                    let ptr = unsafe {
498                        (*self.prealloc_segment.as_mut_ptr())
499                            .as_mut_ptr()
500                            .add(start)
501                    };
502                    unsafe {
503                        std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(
504                            ptr,
505                            end - start,
506                        ))
507                    };
508                }
509            }
510
511            // Drop elements in dynamic segments
512            if self.len > PREALLOC {
513                let dynamic_new_len = new_len.saturating_sub(PREALLOC);
514                let dynamic_old_len = self.len - PREALLOC;
515
516                if dynamic_new_len < dynamic_old_len {
517                    let mut pos = 0usize;
518                    for shelf in 0..self.dynamic_segments.len() {
519                        let size = Self::shelf_size(shelf as u32);
520                        let seg_end = pos + size;
521
522                        // Calculate overlap with [dynamic_new_len, dynamic_old_len)
523                        let drop_start = dynamic_new_len.max(pos);
524                        let drop_end = dynamic_old_len.min(seg_end);
525
526                        if drop_start < drop_end {
527                            let offset = drop_start - pos;
528                            let count = drop_end - drop_start;
529                            let ptr = unsafe {
530                                self.dynamic_segments
531                                    .get_unchecked(shelf)
532                                    .as_ptr()
533                                    .add(offset)
534                            };
535                            unsafe {
536                                std::ptr::drop_in_place(std::ptr::slice_from_raw_parts_mut(
537                                    ptr, count,
538                                ))
539                            };
540                        }
541
542                        pos = seg_end;
543                        if pos >= dynamic_old_len {
544                            break;
545                        }
546                    }
547                }
548            }
549        }
550
551        self.len = new_len;
552        // Update write pointer cache
553        if new_len > 0 {
554            self.init_write_ptr();
555        } else if PREALLOC > 0 {
556            let base = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
557            self.write_ptr = base;
558            self.segment_end = unsafe { base.add(PREALLOC) };
559        } else {
560            self.write_ptr = std::ptr::null_mut();
561            self.segment_end = std::ptr::null_mut();
562        }
563    }
564
    /// Reserves capacity for at least `additional` more elements.
    ///
    /// NOTE(review): `self.len + additional` can overflow `usize` for
    /// pathological arguments; `Vec::reserve` panics via `checked_add` —
    /// consider doing the same here.
    pub fn reserve(&mut self, additional: usize) {
        let old_capacity = self.capacity();
        self.grow_capacity(self.len + additional);
        // Initialize write pointer if we didn't have capacity before
        if old_capacity == 0 && self.capacity() > 0 && self.segment_end.is_null() {
            self.init_write_ptr();
        }
    }

    /// Shrinks the capacity to match the current length.
    ///
    /// This deallocates unused dynamic segments.
    ///
    /// NOTE(review): assumes `shrink_capacity` never frees the segment the
    /// cached `write_ptr`/`segment_*` pointers refer to — verify against
    /// its implementation.
    pub fn shrink_to_fit(&mut self) {
        self.shrink_capacity(self.len);
    }
581
    /// Returns an iterator over references to the elements.
    #[inline]
    pub fn iter(&self) -> Iter<'_, T, PREALLOC> {
        // Initialize with null pointers - first next() call will set up the segment
        Iter {
            vec: self,
            ptr: std::ptr::null(),
            segment_end: std::ptr::null(),
            index: 0,
            shelf_index: 0,
        }
    }

    /// Returns an iterator over mutable references to the elements.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T, PREALLOC> {
        // Initialize with null pointers - first next() call will set up the segment
        IterMut {
            vec: self,
            ptr: std::ptr::null_mut(),
            segment_end: std::ptr::null_mut(),
            index: 0,
            shelf_index: 0,
        }
    }
607
    /// Returns an immutable slice view of the entire vector.
    ///
    /// The view borrows the vector; no elements are copied.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let slice = vec.as_slice();
    /// assert_eq!(slice.len(), 10);
    /// assert_eq!(slice[0], 0);
    /// ```
    #[inline]
    pub fn as_slice(&self) -> SegmentedSlice<'_, T, PREALLOC> {
        SegmentedSlice::new(self)
    }

    /// Returns a mutable slice view of the entire vector.
    ///
    /// The view borrows the vector mutably; no elements are copied.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let mut slice = vec.as_mut_slice();
    /// slice[0] = 100;
    /// assert_eq!(vec[0], 100);
    /// ```
    #[inline]
    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T, PREALLOC> {
        SegmentedSliceMut::new(self)
    }
645
    /// Returns an immutable slice view of a range of the vector.
    ///
    /// Empty ranges (`start == end`) are allowed.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let slice = vec.slice(2..5);
    /// assert_eq!(slice.len(), 3);
    /// assert_eq!(slice[0], 2);
    /// ```
    #[inline]
    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSlice::from_range(self, range.start, range.end)
    }

    /// Returns a mutable slice view of a range of the vector.
    ///
    /// Empty ranges (`start == end`) are allowed.
    ///
    /// # Panics
    ///
    /// Panics if the range is out of bounds.
    ///
    /// # Example
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend(0..10);
    ///
    /// let mut slice = vec.slice_mut(2..5);
    /// slice[0] = 100;
    /// assert_eq!(vec[2], 100);
    /// ```
    #[inline]
    pub fn slice_mut(
        &mut self,
        range: std::ops::Range<usize>,
    ) -> SegmentedSliceMut<'_, T, PREALLOC> {
        assert!(range.start <= range.end && range.end <= self.len);
        SegmentedSliceMut::from_range(self, range.start, range.end)
    }
696
    /// Extends the vector by cloning elements from a slice.
    ///
    /// Reserves all required capacity up front, then writes segment by
    /// segment: first into any free inline (prealloc) slots, then into the
    /// dynamic segments.
    pub fn extend_from_slice(&mut self, other: &[T])
    where
        T: Clone,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        // Fill prealloc segment if there's room
        if PREALLOC > 0 && self.len < PREALLOC {
            let prealloc_remaining = PREALLOC - self.len;
            let to_copy = src.len().min(prealloc_remaining);
            let dest = unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len)
            };
            for (i, item) in src.iter().take(to_copy).enumerate() {
                // SAFETY: dest..dest+to_copy are reserved, unused slots.
                unsafe { std::ptr::write(dest.add(i), item.clone()) };
            }
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Fill dynamic segments
        while !src.is_empty() {
            // `remaining` is the number of slots left in the shelf holding
            // the slot of index `len`.
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            let dest = unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            };
            for (i, item) in src.iter().take(to_copy).enumerate() {
                // SAFETY: dest..dest+to_copy are reserved, unused slots.
                unsafe { std::ptr::write(dest.add(i), item.clone()) };
            }
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Set up write pointer cache for next push
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            // Exactly full: a null cache routes the next push through the
            // slow path, which allocates the next shelf.
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
750
    /// Extends the vector by copying elements from a slice (for `Copy` types).
    ///
    /// This is more efficient than `extend_from_slice` for `Copy` types
    /// as it uses bulk memory copy operations (one `copy_nonoverlapping`
    /// per touched segment).
    pub fn extend_from_copy_slice(&mut self, other: &[T])
    where
        T: Copy,
    {
        if other.is_empty() {
            return;
        }
        self.reserve(other.len());

        let mut src = other;

        // Fill prealloc segment if there's room
        if PREALLOC > 0 && self.len < PREALLOC {
            let prealloc_remaining = PREALLOC - self.len;
            let to_copy = src.len().min(prealloc_remaining);
            unsafe {
                // SAFETY: dest..dest+to_copy are reserved, unused slots and
                // cannot overlap the borrowed source slice.
                let dest = (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.len);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Fill dynamic segments
        while !src.is_empty() {
            // `remaining` is the number of slots left in the shelf holding
            // the slot of index `len`.
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            unsafe {
                // SAFETY: dest..dest+to_copy are reserved, unused slots and
                // cannot overlap the borrowed source slice.
                let dest = self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
            };
            self.len += to_copy;
            src = &src[to_copy..];
        }

        // Set up write pointer cache for next push
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else {
            // Exactly full: a null cache routes the next push through the
            // slow path, which allocates the next shelf.
            self.write_ptr = std::ptr::null_mut();
            self.segment_end = std::ptr::null_mut();
        }
    }
804
    /// Sorts the vector in place using a stable sorting algorithm.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
    ///
    /// The algorithm is a merge sort adapted from the Rust standard library.
    /// Delegates to the segment-aware implementation on the mutable slice view.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort();
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
    /// ```
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort();
    }

    /// Sorts the vector in place with a comparator function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
    ///
    /// The comparator function must define a total ordering for the elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort_by(|a, b| b.cmp(a)); // reverse order
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
    /// ```
    pub fn sort_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_by(compare);
    }

    /// Sorts the vector in place with a key extraction function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and O(m * n * log(n)) worst-case,
    /// where the key function is O(m).
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
    /// vec.sort_by_key(|k| k.abs());
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, -3, -4, 5, 6, -9]);
    /// ```
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_by_key(f);
    }
875
    /// Sorts the vector in place using an unstable sorting algorithm.
    ///
    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
    ///
    /// The algorithm is a quicksort with heapsort fallback, adapted from the Rust standard library.
    /// Delegates to the segment-aware implementation on the mutable slice view.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort_unstable();
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
    /// ```
    #[inline]
    pub fn sort_unstable(&mut self)
    where
        T: Ord,
    {
        self.as_mut_slice().sort_unstable();
    }

    /// Sorts the vector in place with a comparator function using an unstable sorting algorithm.
    ///
    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
    ///
    /// The comparator function must define a total ordering for the elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
    /// vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
    /// ```
    pub fn sort_unstable_by<F>(&mut self, compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        self.as_mut_slice().sort_unstable_by(compare);
    }

    /// Sorts the vector in place with a key extraction function using an unstable sorting algorithm.
    ///
    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case,
    /// where the key function is O(m).
    ///
    /// # Examples
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
    /// vec.sort_unstable_by_key(|k| k.abs());
    /// // Note: unstable sort may reorder equal elements, so we just check it's sorted by abs value
    /// let result: Vec<i32> = vec.iter().copied().collect();
    /// for i in 1..result.len() {
    ///     assert!(result[i-1].abs() <= result[i].abs());
    /// }
    /// ```
    #[inline]
    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        self.as_mut_slice().sort_unstable_by_key(f);
    }
950
    /// Checks if the elements of this vector are sorted (non-decreasing).
    ///
    /// Returns `true` for empty and single-element vectors.
    pub fn is_sorted(&self) -> bool
    where
        T: PartialOrd,
    {
        // Delegates to the read-only slice view.
        self.as_slice().is_sorted()
    }
958
    /// Checks if the elements of this vector are sorted using the given comparator function.
    ///
    /// `compare(a, b)` should return `true` when `a` may precede `b`
    /// (matching `slice::is_sorted_by` semantics).
    pub fn is_sorted_by<F>(&self, compare: F) -> bool
    where
        F: FnMut(&T, &T) -> bool,
    {
        // Delegates to the read-only slice view.
        self.as_slice().is_sorted_by(compare)
    }
966
    /// Checks if the elements of this vector are sorted using the given key extraction function.
    ///
    /// Equivalent to checking that the mapped keys form a non-decreasing sequence.
    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
    where
        F: FnMut(&T) -> K,
        K: PartialOrd,
    {
        // Delegates to the read-only slice view.
        self.as_slice().is_sorted_by_key(f)
    }
975
    /// Returns the index of the partition point according to the given predicate.
    ///
    /// Assumes the vector is partitioned: all elements for which `pred` returns
    /// `true` precede all elements for which it returns `false`.
    pub fn partition_point<P>(&self, pred: P) -> usize
    where
        P: FnMut(&T) -> bool,
    {
        // Delegates to the read-only slice view (binary search).
        self.as_slice().partition_point(pred)
    }
983
    /// Rotates the vector in-place such that the first `mid` elements move to the end.
    ///
    /// Delegates to the mutable slice view's `rotate_left`.
    pub fn rotate_left(&mut self, mid: usize) {
        self.as_mut_slice().rotate_left(mid);
    }
988
    /// Rotates the vector in-place such that the last `k` elements move to the front.
    ///
    /// Delegates to the mutable slice view's `rotate_right`.
    pub fn rotate_right(&mut self, k: usize) {
        self.as_mut_slice().rotate_right(k);
    }
993
994    /// Creates a new `SegmentedVec` with at least the specified capacity.
995    pub fn with_capacity(capacity: usize) -> Self {
996        let mut vec = Self::new();
997        vec.reserve(capacity);
998        vec
999    }
1000
1001    /// Inserts an element at position `index`, shifting all elements after it to the right.
1002    ///
1003    /// # Panics
1004    ///
1005    /// Panics if `index > len`.
1006    pub fn insert(&mut self, index: usize, element: T) {
1007        assert!(index <= self.len);
1008        self.push(element);
1009        // Rotate the new element into place
1010        if index < self.len - 1 {
1011            for i in (index..self.len - 1).rev() {
1012                unsafe {
1013                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
1014                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
1015                    std::ptr::swap(ptr_a, ptr_b);
1016                }
1017            }
1018        }
1019    }
1020
1021    /// Removes and returns the element at position `index`, shifting all elements after it to the left.
1022    ///
1023    /// # Panics
1024    ///
1025    /// Panics if `index >= len`.
1026    pub fn remove(&mut self, index: usize) -> T {
1027        assert!(index < self.len);
1028        // Shift elements left
1029        for i in index..self.len - 1 {
1030            unsafe {
1031                let ptr_a = self.unchecked_at_mut(i) as *mut T;
1032                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
1033                std::ptr::swap(ptr_a, ptr_b);
1034            }
1035        }
1036        self.pop().unwrap()
1037    }
1038
1039    /// Removes an element from the vector and returns it.
1040    ///
1041    /// The removed element is replaced by the last element of the vector.
1042    /// This does not preserve ordering, but is O(1).
1043    ///
1044    /// # Panics
1045    ///
1046    /// Panics if `index >= len`.
1047    pub fn swap_remove(&mut self, index: usize) -> T {
1048        assert!(index < self.len);
1049        let last_index = self.len - 1;
1050        if index != last_index {
1051            unsafe {
1052                let ptr_a = self.unchecked_at_mut(index) as *mut T;
1053                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
1054                std::ptr::swap(ptr_a, ptr_b);
1055            }
1056        }
1057        self.pop().unwrap()
1058    }
1059
1060    /// Retains only the elements specified by the predicate.
1061    ///
1062    /// Removes all elements for which `f(&element)` returns `false`.
1063    pub fn retain<F>(&mut self, mut f: F)
1064    where
1065        F: FnMut(&T) -> bool,
1066    {
1067        let mut i = 0;
1068        while i < self.len {
1069            if f(unsafe { self.unchecked_at(i) }) {
1070                i += 1;
1071            } else {
1072                self.remove(i);
1073            }
1074        }
1075    }
1076
1077    /// Retains only the elements specified by the predicate, passing a mutable reference.
1078    pub fn retain_mut<F>(&mut self, mut f: F)
1079    where
1080        F: FnMut(&mut T) -> bool,
1081    {
1082        let mut i = 0;
1083        while i < self.len {
1084            if f(unsafe { self.unchecked_at_mut(i) }) {
1085                i += 1;
1086            } else {
1087                self.remove(i);
1088            }
1089        }
1090    }
1091
    /// Removes consecutive duplicate elements.
    ///
    /// If the vector is sorted, this removes all duplicates.
    /// The first element of each run of equal elements is kept.
    pub fn dedup(&mut self)
    where
        T: PartialEq,
    {
        self.dedup_by(|a, b| a == b);
    }
1101
    /// Removes consecutive elements that satisfy the given equality relation.
    ///
    /// `same_bucket(prev, curr)` receives the most recently kept element and the
    /// current candidate; when it returns `true`, the candidate is dropped.
    /// The first element of each run is always kept.
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut T, &mut T) -> bool,
    {
        if self.len <= 1 {
            return;
        }
        // Compaction: [0, write) holds the kept elements; `read` scans forward.
        let mut write = 1;
        for read in 1..self.len {
            let should_keep = unsafe {
                // Raw pointers avoid holding two `&mut self` borrows at once.
                let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
                let curr_ptr = self.unchecked_at_mut(read) as *mut T;
                !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
            };
            if should_keep {
                if read != write {
                    unsafe {
                        // Slot `write` holds an already-dropped duplicate (see
                        // the else branch), so a byte swap moves the live value
                        // in without risking a double drop.
                        let ptr_src = self.unchecked_at_mut(read) as *mut T;
                        let ptr_dst = self.unchecked_at_mut(write) as *mut T;
                        std::ptr::swap(ptr_dst, ptr_src);
                    }
                }
                write += 1;
            } else {
                // Drop the duplicate in place; its slot becomes logically
                // uninitialized until a later kept element is swapped into it.
                unsafe {
                    std::ptr::drop_in_place(self.unchecked_at_mut(read));
                }
            }
        }
        // NOTE(review): `len` shrinks here without refreshing the cached
        // `write_ptr`/`segment_end` (contrast `clone`, which calls
        // `init_write_ptr`) — confirm `push` re-derives its position after a
        // direct `len` change.
        self.len = write;
    }
1135
    /// Removes consecutive elements that map to the same key.
    ///
    /// Thin wrapper over [`Self::dedup_by`] comparing extracted keys.
    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
    where
        F: FnMut(&mut T) -> K,
        K: PartialEq,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }
1144
1145    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1146    ///
1147    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1148    /// with each additional slot filled with `value`.
1149    /// If `new_len` is less than `len`, the vector is simply truncated.
1150    pub fn resize(&mut self, new_len: usize, value: T)
1151    where
1152        T: Clone,
1153    {
1154        if new_len > self.len {
1155            self.reserve(new_len - self.len);
1156            while self.len < new_len {
1157                self.push(value.clone());
1158            }
1159        } else {
1160            self.truncate(new_len);
1161        }
1162    }
1163
1164    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1165    ///
1166    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1167    /// with each additional slot filled with the result of calling the closure `f`.
1168    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
1169    where
1170        F: FnMut() -> T,
1171    {
1172        if new_len > self.len {
1173            self.reserve(new_len - self.len);
1174            while self.len < new_len {
1175                self.push(f());
1176            }
1177        } else {
1178            self.truncate(new_len);
1179        }
1180    }
1181
1182    /// Moves all the elements of `other` into `self`, leaving `other` empty.
1183    pub fn append(&mut self, other: &mut Self) {
1184        let other_len = other.len;
1185        self.reserve(other_len);
1186        let start = self.len;
1187        while let Some(item) = other.pop() {
1188            self.push(item);
1189        }
1190        // Reverse the appended portion since pop() returns in reverse order
1191        let mut left = start;
1192        let mut right = self.len;
1193        while left < right {
1194            right -= 1;
1195            if left < right {
1196                unsafe {
1197                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
1198                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
1199                    std::ptr::swap(ptr_a, ptr_b);
1200                }
1201                left += 1;
1202            }
1203        }
1204    }
1205
    /// Splits the vector into two at the given index.
    ///
    /// Returns a newly allocated vector containing the elements in the range `[at, len)`.
    /// After the call, the original vector will contain elements `[0, at)`.
    ///
    /// # Panics
    ///
    /// Panics if `at > len`.
    pub fn split_off(&mut self, at: usize) -> Self {
        assert!(at <= self.len);
        let mut other = Self::new();
        other.reserve(self.len - at);
        for i in at..self.len {
            // Bit-copies each tail element out; ownership moves to `other`,
            // so these slots must not be read or dropped again.
            other.push(unsafe { self.unchecked_read(i) });
        }
        // Truncate by adjusting `len` only — the moved-out slots are logically
        // uninitialized and must not have destructors run on them.
        // NOTE(review): the cached `write_ptr`/`segment_end` are not recomputed
        // here (contrast `clone`, which calls `init_write_ptr`) — confirm
        // `push` re-derives its position after a direct `len` change.
        self.len = at;
        other
    }
1224
    /// Returns an iterator over `chunk_size` elements of the vector at a time.
    ///
    /// The final chunk may be shorter than `chunk_size`.
    ///
    /// # Panics
    ///
    /// Panics if `chunk_size` is 0.
    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T, PREALLOC> {
        // Delegates to the read-only slice view.
        self.as_slice().chunks(chunk_size)
    }
1233
    /// Returns an iterator over all contiguous windows of length `size`.
    ///
    /// Windows overlap; yields nothing if the vector is shorter than `size`.
    ///
    /// # Panics
    ///
    /// Panics if `size` is 0.
    pub fn windows(&self, size: usize) -> Windows<'_, T, PREALLOC> {
        // Delegates to the read-only slice view.
        self.as_slice().windows(size)
    }
1242
    /// Returns an iterator over `chunk_size` elements at a time, starting from the end.
    ///
    /// The final (front-most) chunk may be shorter than `chunk_size`.
    ///
    /// # Panics
    ///
    /// Panics if `chunk_size` is 0.
    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T, PREALLOC> {
        // Delegates to the read-only slice view.
        self.as_slice().rchunks(chunk_size)
    }
1251
    /// Returns an iterator over exactly `chunk_size` elements at a time.
    ///
    /// Elements that do not fill a whole chunk are omitted from iteration.
    ///
    /// # Panics
    ///
    /// Panics if `chunk_size` is 0.
    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T, PREALLOC> {
        // Delegates to the read-only slice view.
        self.as_slice().chunks_exact(chunk_size)
    }
1260
1261    /// Creates a draining iterator that removes the specified range and yields the removed items.
1262    ///
1263    /// # Panics
1264    ///
1265    /// Panics if the starting point is greater than the end point or if the end point is greater than the length.
1266    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T, PREALLOC> {
1267        assert!(range.start <= range.end && range.end <= self.len);
1268        Drain {
1269            vec: self,
1270            range_start: range.start,
1271            range_end: range.end,
1272            index: range.start,
1273        }
1274    }
1275
    /// Copies the elements to a new `Vec<T>`.
    ///
    /// Clones every element; the `SegmentedVec` is left untouched.
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        // Delegates to the read-only slice view.
        self.as_slice().to_vec()
    }
1283
1284    // --- Internal helper methods ---
1285
    /// Calculate the number of shelves needed for a given capacity.
    ///
    /// A "shelf" is one dynamically allocated segment; shelf sizes double, so
    /// the count is derived from the position of the highest set bit of the
    /// biased capacity. Relies on the associated constants
    /// `MIN_NON_ZERO_CAP`/`MIN_CAP_EXP`/`PREALLOC_EXP` defined elsewhere in
    /// this impl.
    #[inline]
    fn shelf_count(box_count: usize) -> u32 {
        if box_count == 0 {
            return 0;
        }
        if PREALLOC == 0 {
            // Bias by the first shelf's size so the subtraction below yields
            // the ceiling of the required shelf count.
            let val = box_count + Self::MIN_NON_ZERO_CAP;
            val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
        } else {
            // Bias by the inline capacity; dynamic shelves start above it
            // (the first dynamic shelf size is given by `shelf_size(0)`).
            let val = box_count + PREALLOC;
            val.next_power_of_two().trailing_zeros() - Self::PREALLOC_EXP - 1
        }
    }
1300
    /// Calculate the size of a shelf at a given index.
    ///
    /// Sizes double with each shelf: with no preallocation the first shelf
    /// holds `MIN_NON_ZERO_CAP` elements; with preallocation the first
    /// dynamic shelf holds `2^(PREALLOC_EXP + 1)` elements.
    #[inline]
    fn shelf_size(shelf_index: u32) -> usize {
        if PREALLOC == 0 {
            Self::MIN_NON_ZERO_CAP << shelf_index
        } else {
            1usize << (shelf_index + Self::PREALLOC_EXP + 1)
        }
    }
1310
    /// Calculate which shelf and box index a list index falls into.
    /// Returns (shelf_index, box_index).
    ///
    /// Adding `BIAS` aligns shelf boundaries with powers of two, so the shelf
    /// number falls out of the most significant bit and the box index is the
    /// remaining low bits.
    #[inline]
    fn location(list_index: usize) -> (usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        // Clear the most significant bit to get box_index
        let box_idx = biased ^ (1usize << msb);
        (shelf as usize, box_idx)
    }
1322
    /// Calculate shelf, box index, and remaining capacity in one go.
    /// Returns (shelf_index, box_index, segment_remaining).
    ///
    /// Same bit trick as [`Self::location`], plus the number of slots left in
    /// the shelf starting at `box_index`, computed without a second lookup.
    #[inline]
    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
        let biased = list_index + Self::BIAS;
        let msb = biased.ilog2();
        let shelf = msb - Self::SHELF_OFFSET;
        let box_idx = biased ^ (1usize << msb);
        // segment_remaining = shelf_size - box_idx = (1 << msb) - box_idx
        //                   = (1 << msb) - (biased - (1 << msb))
        //                   = (2 << msb) - biased
        let segment_remaining = (2usize << msb) - biased;
        (shelf as usize, box_idx, segment_remaining)
    }
1337
    /// Get an unchecked reference to an element.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
        // SAFETY (whole block): the caller guarantees `index < self.len`, so
        // the slot is initialized and its shelf has been allocated.
        unsafe {
            if index < PREALLOC {
                // Element lives in the inline preallocated segment.
                &(*self.prealloc_segment.as_ptr())[index]
            } else {
                // Element lives in a dynamic shelf; translate the flat index.
                let (shelf, box_idx) = Self::location(index);
                &*self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }
1358
    /// Get an unchecked mutable reference to an element.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`.
    #[inline]
    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
        // SAFETY (whole block): the caller guarantees `index < self.len`, so
        // the slot is initialized and its shelf has been allocated; `&mut self`
        // ensures exclusivity.
        unsafe {
            if index < PREALLOC {
                // Element lives in the inline preallocated segment.
                &mut (*self.prealloc_segment.as_mut_ptr())[index]
            } else {
                // Element lives in a dynamic shelf; translate the flat index.
                let (shelf, box_idx) = Self::location(index);
                &mut *self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }
1379
    /// Read a value from an index.
    ///
    /// Performs a bit-copy move out of the slot; the slot is left logically
    /// uninitialized.
    ///
    /// # Safety
    ///
    /// `index` must be less than `self.len`, and the value must not be read again.
    #[inline]
    unsafe fn unchecked_read(&self, index: usize) -> T {
        // SAFETY (whole block): the caller guarantees `index < self.len` and
        // takes responsibility for not duplicating ownership of the value.
        unsafe {
            if index < PREALLOC {
                // Move out of the inline preallocated segment.
                std::ptr::read(&(*self.prealloc_segment.as_ptr())[index])
            } else {
                // Move out of the owning dynamic shelf.
                let (shelf, box_idx) = Self::location(index);
                std::ptr::read(
                    self.dynamic_segments
                        .get_unchecked(shelf)
                        .as_ptr()
                        .add(box_idx),
                )
            }
        }
    }
1401
1402    fn grow_once(&mut self) {
1403        self.dynamic_segments.reserve(1);
1404
1405        let size = Self::shelf_size(self.dynamic_segments.len() as u32);
1406        let layout = Layout::array::<T>(size).expect("Layout overflow");
1407        let ptr = if layout.size() == 0 {
1408            NonNull::dangling()
1409        } else {
1410            let ptr = unsafe { alloc::alloc(layout) };
1411            NonNull::new(ptr as *mut T).expect("Allocation failed")
1412        };
1413        self.dynamic_segments.push(ptr);
1414    }
1415
1416    /// Grow capacity to accommodate at least `new_capacity` elements.
1417    fn grow_capacity(&mut self, new_capacity: usize) {
1418        let new_shelf_count = Self::shelf_count(new_capacity);
1419        let old_shelf_count = self.dynamic_segments.len() as u32;
1420
1421        if new_shelf_count > old_shelf_count {
1422            self.dynamic_segments
1423                .reserve((new_shelf_count - old_shelf_count) as usize);
1424
1425            for i in old_shelf_count..new_shelf_count {
1426                let size = Self::shelf_size(i);
1427                let layout = Layout::array::<T>(size).expect("Layout overflow");
1428                let ptr = if layout.size() == 0 {
1429                    NonNull::dangling()
1430                } else {
1431                    let ptr = unsafe { alloc::alloc(layout) };
1432                    NonNull::new(ptr as *mut T).expect("Allocation failed")
1433                };
1434                self.dynamic_segments.push(ptr);
1435            }
1436        }
1437    }
1438
    /// Compute total capacity given the number of dynamic shelves.
    ///
    /// Includes the inline preallocation; inverse of [`Self::shelf_count`].
    #[inline]
    fn compute_capacity(shelf_count: u32) -> usize {
        if shelf_count == 0 {
            // No dynamic shelves: only the inline segment is available.
            PREALLOC
        } else if PREALLOC == 0 {
            // Geometric series: MIN * (2^n - 1).
            (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
        } else {
            // Inline capacity plus doubling shelves collapses to this form.
            (1usize << (Self::PREALLOC_EXP + 1 + shelf_count)) - PREALLOC
        }
    }
1450
    /// Shrink capacity to the minimum needed for `new_capacity` elements.
    ///
    /// Only whole shelves are released; any elements that lived in the freed
    /// shelves must already have been dropped or moved out by the caller.
    fn shrink_capacity(&mut self, new_capacity: usize) {
        if new_capacity <= PREALLOC {
            // Free all dynamic segments
            self.free_shelves(self.dynamic_segments.len() as u32, 0);
            self.dynamic_segments.clear();
            self.dynamic_segments.shrink_to_fit();
            return;
        }

        let new_shelf_count = Self::shelf_count(new_capacity);
        let old_shelf_count = self.dynamic_segments.len() as u32;

        if new_shelf_count < old_shelf_count {
            // Deallocate the excess shelves before forgetting their pointers.
            self.free_shelves(old_shelf_count, new_shelf_count);
            self.dynamic_segments.truncate(new_shelf_count as usize);
            self.dynamic_segments.shrink_to_fit();
        }
    }
1470
    /// Free shelves from `from_count` down to `to_count` (exclusive).
    ///
    /// Deallocates memory only — destructors are NOT run here, so the caller
    /// must have dropped (or moved out) all elements in these shelves first.
    /// The `dynamic_segments` vector itself is left for the caller to
    /// truncate or clear.
    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
        for i in (to_count..from_count).rev() {
            let size = Self::shelf_size(i);
            let layout = Layout::array::<T>(size).expect("Layout overflow");
            // Zero-sized shelves (ZSTs) used a dangling pointer and were
            // never actually allocated.
            if layout.size() > 0 {
                // SAFETY: shelf `i` was allocated with exactly this layout
                // (see `grow_once`/`grow_capacity`).
                unsafe {
                    alloc::dealloc(
                        self.dynamic_segments[i as usize].as_ptr() as *mut u8,
                        layout,
                    );
                }
            }
        }
    }
1486}
1487
impl<T, const PREALLOC: usize> Drop for SegmentedVec<T, PREALLOC> {
    fn drop(&mut self) {
        // Drop all elements first; `free_shelves` only deallocates memory and
        // never runs destructors, so the order here matters.
        self.clear();
        // Free all dynamic segments
        self.free_shelves(self.dynamic_segments.len() as u32, 0);
    }
}
1496
// Index-based element access used by the sort module; bounds are only
// debug-asserted, so callers must pass valid indices in release builds.
impl<T, const PREALLOC: usize> sort::IndexedAccess<T> for SegmentedVec<T, PREALLOC> {
    /// Shared reference to the element at `index`.
    #[inline]
    fn get_ref(&self, index: usize) -> &T {
        debug_assert!(index < self.len);
        unsafe { self.unchecked_at(index) }
    }

    /// Raw const pointer to the element at `index`.
    #[inline]
    fn get_ptr(&self, index: usize) -> *const T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            // Element lives in the inline preallocated segment.
            unsafe { (*self.prealloc_segment.as_ptr()).as_ptr().add(index) }
        } else {
            // Element lives in a dynamic shelf; translate the flat index.
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Raw mutable pointer to the element at `index`.
    #[inline]
    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
        debug_assert!(index < self.len);
        if index < PREALLOC {
            unsafe {
                (*self.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(index)
            }
        } else {
            let (shelf, box_idx) = Self::location(index);
            unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            }
        }
    }

    /// Swaps the elements at `a` and `b` (no-op when `a == b`).
    #[inline]
    fn swap(&mut self, a: usize, b: usize) {
        if a == b {
            return;
        }
        debug_assert!(a < self.len && b < self.len);
        // SAFETY: a != b, so the two pointers never alias the same element.
        unsafe {
            let ptr_a = self.get_ptr_mut(a);
            let ptr_b = self.get_ptr_mut(b);
            std::ptr::swap(ptr_a, ptr_b);
        }
    }
}
1553
impl<T, const PREALLOC: usize> Default for SegmentedVec<T, PREALLOC> {
    /// Returns an empty vector; equivalent to `SegmentedVec::new()`.
    fn default() -> Self {
        Self::new()
    }
}
1559
1560impl<T, const PREALLOC: usize> Index<usize> for SegmentedVec<T, PREALLOC> {
1561    type Output = T;
1562
1563    fn index(&self, index: usize) -> &Self::Output {
1564        self.get(index).expect("index out of bounds")
1565    }
1566}
1567
1568impl<T, const PREALLOC: usize> IndexMut<usize> for SegmentedVec<T, PREALLOC> {
1569    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
1570        self.get_mut(index).expect("index out of bounds")
1571    }
1572}
1573
impl<T: Clone, const PREALLOC: usize> Clone for SegmentedVec<T, PREALLOC> {
    /// Deep-copies the vector by cloning every element, segment by segment.
    fn clone(&self) -> Self {
        if self.len == 0 {
            return Self::new();
        }

        let mut new_vec = Self::new();
        // Allocate every shelf needed for `self.len` elements up front.
        new_vec.reserve(self.len);

        // Clone prealloc segment
        if PREALLOC > 0 {
            let count = self.len.min(PREALLOC);
            let src = unsafe { (*self.prealloc_segment.as_ptr()).as_ptr() };
            let dst = unsafe { (*new_vec.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
            for i in 0..count {
                // NOTE(review): if `clone()` panics mid-loop, elements already
                // written are leaked (len is only set after the loop). Not UB,
                // but confirm leaking-on-panic is acceptable here.
                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
            }
            new_vec.len = count;
        }

        // Clone dynamic segments
        if self.len > PREALLOC {
            let mut remaining = self.len - PREALLOC;
            for shelf in 0..self.dynamic_segments.len() {
                let size = Self::shelf_size(shelf as u32);
                // Only the occupied prefix of the last shelf is cloned.
                let count = remaining.min(size);
                let src = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
                let dst = unsafe { new_vec.dynamic_segments.get_unchecked(shelf).as_ptr() };
                for i in 0..count {
                    unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
                }
                new_vec.len += count;
                remaining -= count;
                if remaining == 0 {
                    break;
                }
            }
        }

        // Set up write pointer
        if new_vec.len < new_vec.capacity() {
            new_vec.init_write_ptr();
        } else {
            // Exactly full: null pointers presumably force the slow path on
            // the next push. NOTE(review): `segment_base` is left untouched
            // here — confirm that is intended given `init_write_ptr` exists.
            new_vec.write_ptr = std::ptr::null_mut();
            new_vec.segment_end = std::ptr::null_mut();
        }

        new_vec
    }
}
1624
1625impl<T: std::fmt::Debug, const PREALLOC: usize> std::fmt::Debug for SegmentedVec<T, PREALLOC> {
1626    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1627        f.debug_list().entries(self.iter()).finish()
1628    }
1629}
1630
1631impl<T: PartialEq, const PREALLOC: usize> PartialEq for SegmentedVec<T, PREALLOC> {
1632    fn eq(&self, other: &Self) -> bool {
1633        if self.len != other.len {
1634            return false;
1635        }
1636        for i in 0..self.len {
1637            if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1638                return false;
1639            }
1640        }
1641        true
1642    }
1643}
1644
// Marker impl: `eq` is a total equivalence relation whenever `T: Eq`.
impl<T: Eq, const PREALLOC: usize> Eq for SegmentedVec<T, PREALLOC> {}
1646
1647impl<T, const PREALLOC: usize> FromIterator<T> for SegmentedVec<T, PREALLOC> {
1648    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1649        let iter = iter.into_iter();
1650        let (lower, _) = iter.size_hint();
1651        let mut vec = Self::new();
1652        vec.reserve(lower);
1653        for item in iter {
1654            vec.push(item);
1655        }
1656        vec
1657    }
1658}
1659
1660impl<T, const PREALLOC: usize> Extend<T> for SegmentedVec<T, PREALLOC> {
1661    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1662        let iter = iter.into_iter();
1663        let (lower, _) = iter.size_hint();
1664        self.reserve(lower);
1665        for item in iter {
1666            self.push(item);
1667        }
1668    }
1669}
1670
1671// --- Iterator implementations ---
1672
/// An iterator over references to elements of a `SegmentedVec`.
///
/// Walks raw pointers within the current segment and hops to the next segment
/// once `ptr` reaches `segment_end` (see `next_segment`).
pub struct Iter<'a, T, const PREALLOC: usize> {
    vec: &'a SegmentedVec<T, PREALLOC>,
    /// Current pointer within segment
    ptr: *const T,
    /// End of current segment (min of segment capacity and vec.len)
    segment_end: *const T,
    /// Current logical index
    index: usize,
    /// Current shelf index (for dynamic segments)
    shelf_index: u32,
}
1685
impl<'a, T, const PREALLOC: usize> Iterator for Iter<'a, T, PREALLOC> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment.
        if self.ptr < self.segment_end {
            // SAFETY: `ptr < segment_end` implies the slot is initialized.
            let result = unsafe { &*self.ptr };
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            return Some(result);
        }
        // Slow path: advance to the next segment, or finish.
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: `index` elements have been yielded out of `vec.len`.
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1706
impl<'a, T, const PREALLOC: usize> Iter<'a, T, PREALLOC> {
    /// Cold path of `next`: repositions `ptr`/`segment_end` on the next
    /// segment and yields its first element, or returns `None` when done.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a T> {
        if self.index >= self.vec.len {
            return None;
        }

        // Move to next segment
        if self.index < PREALLOC {
            // In prealloc segment
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_ptr())
                    .as_ptr()
                    .add(self.index)
            };
            // Clamp to whichever comes first: end of the inline segment or
            // the vector's logical length.
            let remaining = PREALLOC - self.index;
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // In dynamic segments
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            // Clamp the segment to the vector's logical length.
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the freshly selected segment.
        let result = unsafe { &*self.ptr };
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(result)
    }
}
1743
// Sound: `size_hint` returns an exact count.
impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Iter<'a, T, PREALLOC> {}
1745
/// An iterator over mutable references to elements of a `SegmentedVec`.
///
/// Walks raw pointers within the current segment and hops to the next segment
/// once `ptr` reaches `segment_end` (see `next_segment`).
pub struct IterMut<'a, T, const PREALLOC: usize> {
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    /// Current pointer within segment
    ptr: *mut T,
    /// End of current segment (min of segment capacity and vec.len)
    segment_end: *mut T,
    /// Current logical index
    index: usize,
    /// Current shelf index (for dynamic segments)
    shelf_index: u32,
}
1758
impl<'a, T, const PREALLOC: usize> Iterator for IterMut<'a, T, PREALLOC> {
    type Item = &'a mut T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Fast path: still inside the current segment.
        if self.ptr < self.segment_end {
            let result = self.ptr;
            self.ptr = unsafe { self.ptr.add(1) };
            self.index += 1;
            // SAFETY: result < segment_end, so the slot is initialized; the
            // pointer has advanced, so this `&mut` is never handed out twice.
            return Some(unsafe { &mut *result });
        }
        // Slow path: advance to the next segment, or finish.
        self.next_segment()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: `index` elements have been yielded out of `vec.len`.
        let remaining = self.vec.len.saturating_sub(self.index);
        (remaining, Some(remaining))
    }
}
1779
impl<'a, T, const PREALLOC: usize> IterMut<'a, T, PREALLOC> {
    /// Cold path of `next`: repositions `ptr`/`segment_end` on the next
    /// segment and yields its first element, or returns `None` when done.
    #[inline]
    fn next_segment(&mut self) -> Option<&'a mut T> {
        if self.index >= self.vec.len {
            return None;
        }

        // Move to next segment
        if self.index < PREALLOC {
            // In prealloc segment
            let ptr = unsafe {
                (*self.vec.prealloc_segment.as_mut_ptr())
                    .as_mut_ptr()
                    .add(self.index)
            };
            // Clamp to whichever comes first: end of the inline segment or
            // the vector's logical length.
            let remaining = PREALLOC - self.index;
            let segment_len = remaining.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
        } else {
            // In dynamic segments
            let shelf_idx = self.shelf_index as usize;
            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
            // as_ptr() returns *mut T since NonNull<T> stores *mut T
            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
            // Clamp the segment to the vector's logical length.
            let segment_len = shelf_size.min(self.vec.len - self.index);
            self.ptr = ptr;
            self.segment_end = unsafe { ptr.add(segment_len) };
            self.shelf_index += 1;
        }

        // Yield the first element of the freshly selected segment.
        let result = self.ptr;
        self.ptr = unsafe { self.ptr.add(1) };
        self.index += 1;
        Some(unsafe { &mut *result })
    }
}
1817
1818impl<'a, T, const PREALLOC: usize> ExactSizeIterator for IterMut<'a, T, PREALLOC> {}
1819
1820impl<T, const PREALLOC: usize> IntoIterator for SegmentedVec<T, PREALLOC> {
1821    type Item = T;
1822    type IntoIter = IntoIter<T, PREALLOC>;
1823
1824    fn into_iter(self) -> Self::IntoIter {
1825        IntoIter {
1826            vec: self,
1827            index: 0,
1828        }
1829    }
1830}
1831
1832impl<'a, T, const PREALLOC: usize> IntoIterator for &'a SegmentedVec<T, PREALLOC> {
1833    type Item = &'a T;
1834    type IntoIter = Iter<'a, T, PREALLOC>;
1835
1836    fn into_iter(self) -> Self::IntoIter {
1837        self.iter()
1838    }
1839}
1840
1841impl<'a, T, const PREALLOC: usize> IntoIterator for &'a mut SegmentedVec<T, PREALLOC> {
1842    type Item = &'a mut T;
1843    type IntoIter = IterMut<'a, T, PREALLOC>;
1844
1845    fn into_iter(self) -> Self::IntoIter {
1846        self.iter_mut()
1847    }
1848}
1849
/// An owning iterator over elements of a `SegmentedVec`.
pub struct IntoIter<T, const PREALLOC: usize> {
    /// The vector being consumed. Elements at indices `< index` have already
    /// been moved out and must not be dropped again.
    vec: SegmentedVec<T, PREALLOC>,
    /// Index of the next element to yield.
    index: usize,
}
1855
1856impl<T, const PREALLOC: usize> Iterator for IntoIter<T, PREALLOC> {
1857    type Item = T;
1858
1859    fn next(&mut self) -> Option<Self::Item> {
1860        if self.index >= self.vec.len {
1861            return None;
1862        }
1863        let value = unsafe { self.vec.unchecked_read(self.index) };
1864        self.index += 1;
1865        Some(value)
1866    }
1867
1868    fn size_hint(&self) -> (usize, Option<usize>) {
1869        let remaining = self.vec.len - self.index;
1870        (remaining, Some(remaining))
1871    }
1872}
1873
1874impl<T, const PREALLOC: usize> ExactSizeIterator for IntoIter<T, PREALLOC> {}
1875
1876impl<T, const PREALLOC: usize> Drop for IntoIter<T, PREALLOC> {
1877    fn drop(&mut self) {
1878        // Drop remaining elements that weren't consumed
1879        for i in self.index..self.vec.len {
1880            unsafe {
1881                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1882            }
1883        }
1884        // Prevent the Vec from dropping elements again
1885        self.vec.len = 0;
1886    }
1887}
1888
/// A draining iterator for `SegmentedVec`.
///
/// This struct is created by the `drain` method on `SegmentedVec`.
pub struct Drain<'a, T, const PREALLOC: usize> {
    /// The vector being drained; its tail is compacted when the drain drops.
    vec: &'a mut SegmentedVec<T, PREALLOC>,
    /// First index of the drained range (fixed for the drain's lifetime).
    range_start: usize,
    /// One past the last live index of the drained range; moves down as
    /// elements are consumed from the back.
    range_end: usize,
    /// Next front index to yield; moves up as elements are consumed.
    index: usize,
}
1898
1899impl<'a, T, const PREALLOC: usize> Iterator for Drain<'a, T, PREALLOC> {
1900    type Item = T;
1901
1902    fn next(&mut self) -> Option<Self::Item> {
1903        if self.index >= self.range_end {
1904            None
1905        } else {
1906            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1907            self.index += 1;
1908            Some(value)
1909        }
1910    }
1911
1912    fn size_hint(&self) -> (usize, Option<usize>) {
1913        let remaining = self.range_end - self.index;
1914        (remaining, Some(remaining))
1915    }
1916}
1917
1918impl<'a, T, const PREALLOC: usize> DoubleEndedIterator for Drain<'a, T, PREALLOC> {
1919    fn next_back(&mut self) -> Option<Self::Item> {
1920        if self.index >= self.range_end {
1921            None
1922        } else {
1923            self.range_end -= 1;
1924            Some(unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) })
1925        }
1926    }
1927}
1928
1929impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Drain<'a, T, PREALLOC> {}
1930
1931impl<'a, T, const PREALLOC: usize> Drop for Drain<'a, T, PREALLOC> {
1932    fn drop(&mut self) {
1933        // Drop any remaining elements in the range
1934        for i in self.index..self.range_end {
1935            unsafe {
1936                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1937            }
1938        }
1939
1940        // Shift elements after the range to fill the gap
1941        let original_range_end = self.range_end;
1942        let original_len = self.vec.len;
1943        let drain_count = original_range_end - self.range_start;
1944
1945        for i in 0..(original_len - original_range_end) {
1946            unsafe {
1947                let src = self.vec.unchecked_at(original_range_end + i) as *const T;
1948                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
1949                std::ptr::copy_nonoverlapping(src, dst, 1);
1950            }
1951        }
1952
1953        self.vec.len = original_len - drain_count;
1954    }
1955}
1956
// Unit tests. Small `PREALLOC` values (0..=16) are used deliberately so that
// segment boundaries are crossed after only a handful of pushes.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_new_empty() {
        let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_push_pop() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        vec.push(2);
        vec.push(3);
        assert_eq!(vec.len(), 3);
        assert_eq!(vec.pop(), Some(3));
        assert_eq!(vec.pop(), Some(2));
        assert_eq!(vec.pop(), Some(1));
        assert_eq!(vec.pop(), None);
    }

    #[test]
    fn test_get() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        vec.push(30);
        assert_eq!(vec.get(0), Some(&10));
        assert_eq!(vec.get(1), Some(&20));
        assert_eq!(vec.get(2), Some(&30));
        assert_eq!(vec.get(3), None);
    }

    #[test]
    fn test_index() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(10);
        vec.push(20);
        assert_eq!(vec[0], 10);
        assert_eq!(vec[1], 20);
        vec[0] = 100;
        assert_eq!(vec[0], 100);
    }

    // Core guarantee of the data structure: growth never moves elements.
    #[test]
    fn test_stable_pointers() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(1);
        let ptr = &vec[0] as *const i32;

        // Push many more elements to trigger segment allocations
        for i in 2..1000 {
            vec.push(i);
        }

        // The pointer should still be valid
        assert_eq!(unsafe { *ptr }, 1);
    }

    #[test]
    fn test_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..100).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_iter_mut() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        for item in vec.iter_mut() {
            *item *= 2;
        }

        let collected: Vec<i32> = vec.iter().copied().collect();
        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_into_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        for i in 0..10 {
            vec.push(i);
        }

        let collected: Vec<i32> = vec.into_iter().collect();
        let expected: Vec<i32> = (0..10).collect();
        assert_eq!(collected, expected);
    }

    #[test]
    fn test_from_iter() {
        let vec: SegmentedVec<i32, 4> = (0..10).collect();
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_extend() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..5);
        vec.extend(5..10);
        assert_eq!(vec.len(), 10);
        for i in 0..10 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_clear() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.clear();
        assert!(vec.is_empty());
        assert_eq!(vec.len(), 0);
    }

    #[test]
    fn test_truncate() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.truncate(5);
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    // PREALLOC = 0 means every element lives in a dynamic segment.
    #[test]
    fn test_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        for i in 0..100 {
            vec.push(i);
        }

        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        assert_eq!(vec.pop(), Some(99));
        assert_eq!(vec.len(), 99);
    }

    #[test]
    fn test_various_prealloc_sizes() {
        fn test_prealloc<const N: usize>() {
            let mut vec: SegmentedVec<i32, N> = SegmentedVec::new();
            for i in 0..100 {
                vec.push(i);
            }
            for i in 0..100 {
                assert_eq!(vec[i], i as i32);
            }
        }

        test_prealloc::<0>();
        test_prealloc::<1>();
        test_prealloc::<2>();
        test_prealloc::<4>();
        test_prealloc::<8>();
        test_prealloc::<16>();
    }

    #[test]
    fn test_clone() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        let vec2 = vec.clone();
        assert_eq!(vec, vec2);
    }

    #[test]
    fn test_debug() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..3);
        let debug_str = format!("{:?}", vec);
        assert_eq!(debug_str, "[0, 1, 2]");
    }

    #[test]
    fn test_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        assert_eq!(vec.first(), None);
        assert_eq!(vec.last(), None);

        vec.push(1);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&1));

        vec.push(2);
        vec.push(3);
        assert_eq!(vec.first(), Some(&1));
        assert_eq!(vec.last(), Some(&3));
    }

    // Verifies every element's destructor runs exactly once when the vec drops.
    #[test]
    fn test_drop_elements() {
        use std::cell::Cell;
        use std::rc::Rc;

        let drop_count = Rc::new(Cell::new(0));

        struct DropCounter {
            counter: Rc<Cell<i32>>,
        }

        impl Drop for DropCounter {
            fn drop(&mut self) {
                self.counter.set(self.counter.get() + 1);
            }
        }

        {
            let mut vec: SegmentedVec<DropCounter, 4> = SegmentedVec::new();
            for _ in 0..10 {
                vec.push(DropCounter {
                    counter: Rc::clone(&drop_count),
                });
            }
        }

        assert_eq!(drop_count.get(), 10);
    }

    #[test]
    fn test_shrink_to_fit() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);
        vec.truncate(5);
        vec.shrink_to_fit();
        assert_eq!(vec.len(), 5);
        for i in 0..5 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_empty() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.sort();
        assert!(vec.is_empty());
    }

    #[test]
    fn test_sort_single() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(42);
        vec.sort();
        assert_eq!(vec[0], 42);
    }

    #[test]
    fn test_sort_already_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_reverse_sorted() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend((0..10).rev());
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_sort_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_by(|a, b| b.cmp(a)); // reverse order
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_by_key(|k| k.abs());
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
    }

    #[test]
    fn test_sort_stable() {
        // Test that sort is stable (equal elements maintain their relative order)
        #[derive(Debug, Clone, Eq, PartialEq)]
        struct Item {
            key: i32,
            order: usize,
        }

        let mut vec: SegmentedVec<Item, 4> = SegmentedVec::new();
        vec.push(Item { key: 1, order: 0 });
        vec.push(Item { key: 2, order: 1 });
        vec.push(Item { key: 1, order: 2 });
        vec.push(Item { key: 2, order: 3 });
        vec.push(Item { key: 1, order: 4 });

        vec.sort_by_key(|item| item.key);

        // All items with key=1 should come first, in original order
        assert_eq!(vec[0], Item { key: 1, order: 0 });
        assert_eq!(vec[1], Item { key: 1, order: 2 });
        assert_eq!(vec[2], Item { key: 1, order: 4 });
        // Then items with key=2, in original order
        assert_eq!(vec[3], Item { key: 2, order: 1 });
        assert_eq!(vec[4], Item { key: 2, order: 3 });
    }

    #[test]
    fn test_sort_unstable() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_unstable_by() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
    }

    #[test]
    fn test_sort_unstable_by_key() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
        vec.sort_unstable_by_key(|k| k.abs());
        // Just verify it's sorted by absolute value (unstable may reorder equal elements)
        let result: Vec<i32> = vec.iter().copied().collect();
        for i in 1..result.len() {
            assert!(result[i - 1].abs() <= result[i].abs());
        }
    }

    #[test]
    fn test_sort_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        // Add numbers in reverse order
        vec.extend((0..1000).rev());
        vec.sort();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_unstable_large() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        // Add numbers in reverse order
        vec.extend((0..1000).rev());
        vec.sort_unstable();
        for i in 0..1000 {
            assert_eq!(vec[i], i as i32);
        }
    }

    #[test]
    fn test_sort_with_zero_prealloc() {
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
        vec.sort();
        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_sort_pointers_remain_stable_after_sort() {
        // Verify that after sorting, pointers to elements are still valid
        // (they point to different values, but the memory locations are stable)
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 2, 8, 1, 9]);

        // Get pointer to first element before sort
        let ptr = &vec[0] as *const i32;

        vec.sort();

        // After sort, the pointer still points to valid memory (now contains sorted value)
        assert_eq!(unsafe { *ptr }, 1); // First element after sort is 1
    }

    // --- Slice tests ---

    #[test]
    fn test_as_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.len(), 10);
        assert_eq!(slice[0], 0);
        assert_eq!(slice[9], 9);
    }

    #[test]
    fn test_as_mut_slice() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.as_mut_slice();
            slice[0] = 100;
            slice[9] = 200;
        }

        assert_eq!(vec[0], 100);
        assert_eq!(vec[9], 200);
    }

    #[test]
    fn test_slice_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.slice(2..5);
        assert_eq!(slice.len(), 3);
        assert_eq!(slice[0], 2);
        assert_eq!(slice[1], 3);
        assert_eq!(slice[2], 4);
    }

    #[test]
    fn test_slice_mut_range() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice[0] = 100;
            slice[2] = 200;
        }

        assert_eq!(vec[2], 100);
        assert_eq!(vec[4], 200);
    }

    #[test]
    fn test_slice_first_last() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice.first(), Some(&0));
        assert_eq!(slice.last(), Some(&9));
    }

    #[test]
    fn test_slice_iter() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().copied().collect();
        assert_eq!(collected, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_iter_rev() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let collected: Vec<i32> = slice.iter().rev().copied().collect();
        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_contains() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.contains(&5));
        assert!(!slice.contains(&100));
    }

    #[test]
    fn test_slice_binary_search() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..100);

        let slice = vec.as_slice();
        assert_eq!(slice.binary_search(&50), Ok(50));
        assert_eq!(slice.binary_search(&0), Ok(0));
        assert_eq!(slice.binary_search(&99), Ok(99));
        assert_eq!(slice.binary_search(&100), Err(100));
    }

    #[test]
    fn test_slice_split_at() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let (left, right) = slice.split_at(5);
        assert_eq!(left.len(), 5);
        assert_eq!(right.len(), 5);
        assert_eq!(left[0], 0);
        assert_eq!(right[0], 5);
    }

    #[test]
    fn test_slice_to_vec() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        let converted = slice.to_vec();
        assert_eq!(converted, (0..10).collect::<Vec<_>>());
    }

    #[test]
    fn test_slice_mut_sort() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);

        // Sort only the middle part
        {
            let mut slice = vec.slice_mut(2..8);
            slice.sort();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
    }

    #[test]
    fn test_slice_mut_reverse() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..8);
            slice.reverse();
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
    }

    #[test]
    fn test_slice_mut_fill() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut slice = vec.slice_mut(2..5);
            slice.fill(99);
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
    }

    #[test]
    fn test_slice_starts_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.starts_with(&[0, 1, 2]));
        assert!(!slice.starts_with(&[1, 2, 3]));
    }

    #[test]
    fn test_slice_ends_with() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert!(slice.ends_with(&[7, 8, 9]));
        assert!(!slice.ends_with(&[6, 7, 8]));
    }

    #[test]
    fn test_slice_eq() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        let slice = vec.as_slice();
        assert_eq!(slice, (0..10).collect::<Vec<_>>());
    }

    // Pins the first-segment sizing policy for small element types.
    #[test]
    fn test_min_non_zero_cap() {
        // For u8 (1 byte), first segment should be 8 elements
        let mut vec_u8: SegmentedVec<u8, 0> = SegmentedVec::new();
        vec_u8.push(1);
        assert_eq!(vec_u8.capacity(), 8);

        // For i32 (4 bytes, <= 1024), first segment should be 4 elements
        let mut vec_i32: SegmentedVec<i32, 0> = SegmentedVec::new();
        vec_i32.push(1);
        assert_eq!(vec_i32.capacity(), 4);

        // Verify indexing still works correctly with the larger first segment
        for i in 0u8..100 {
            vec_u8.push(i);
        }
        for i in 0..101 {
            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
        }
    }

    #[test]
    fn test_extend_from_copy_slice() {
        // Test with PREALLOC=0
        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
        let data: Vec<i32> = (0..100).collect();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Test with PREALLOC=4, starting empty
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&data);
        assert_eq!(vec.len(), 100);
        for i in 0..100 {
            assert_eq!(vec[i], i as i32);
        }

        // Test with PREALLOC=4, starting with some elements
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.push(999);
        vec.push(998);
        vec.extend_from_copy_slice(&data[..10]);
        assert_eq!(vec.len(), 12);
        assert_eq!(vec[0], 999);
        assert_eq!(vec[1], 998);
        for i in 0..10 {
            assert_eq!(vec[i + 2], i as i32);
        }

        // Test empty slice
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[]);
        assert!(vec.is_empty());

        // Test extend to non-boundary, then push (regression test)
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3]); // 3 elements, not at boundary
        assert_eq!(vec.len(), 3);
        vec.push(4); // Should use fast path, not crash in push_slow
        vec.push(5);
        vec.push(6);
        assert_eq!(vec.len(), 6);
        for i in 0..6 {
            assert_eq!(vec[i], (i + 1) as i32);
        }

        // Test extend to exact capacity boundary, then push
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend_from_copy_slice(&[1, 2, 3, 4]); // Fills prealloc exactly
        assert_eq!(vec.len(), 4);
        vec.push(5); // Should correctly allocate new segment
        assert_eq!(vec.len(), 5);
        assert_eq!(vec[4], 5);
    }
}