segmented_vec/
lib.rs

//! A segmented growable vector with stable pointers.
//!
//! Unlike `Vec`, pushing new elements never invalidates pointers to existing elements.
//! This is achieved by storing elements in segments of exponentially growing sizes.
//!
//! Note that the first `PREALLOC` elements are stored inline in the `SegmentedVec`
//! itself, so pointers to them remain valid only while the vector itself is not moved.
//!
//! # Example
//!
//! ```
//! use segmented_vec::SegmentedVec;
//!
//! let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
//! vec.push(1);
//! vec.push(2);
//!
//! // Get a pointer to the first element
//! let ptr = &vec[0] as *const i32;
//!
//! // Push more elements - the pointer remains valid!
//! for i in 3..100 {
//!     vec.push(i);
//! }
//!
//! // The pointer is still valid
//! assert_eq!(unsafe { *ptr }, 1);
//! ```
26
27mod slice;
28mod sort;
29
30pub use slice::{
31    Chunks, ChunksExact, RChunks, SegmentedSlice, SegmentedSliceMut, SliceIter, SliceIterMut,
32    Windows,
33};
34
35use std::alloc::{self, Layout};
36use std::cmp::Ordering;
37use std::marker::PhantomData;
38use std::mem::MaybeUninit;
39use std::ops::{Index, IndexMut};
40use std::ptr::NonNull;
41
42/// A segmented vector with stable pointers.
43///
44/// `PREALLOC` specifies the number of elements to store inline (must be 0 or a power of 2).
45/// Elements beyond the preallocation are stored in dynamically allocated segments.
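///
/// A short sketch of how the inline and dynamic storage are used (the choice of
/// `PREALLOC = 2` here is purely illustrative):
///
/// ```
/// use segmented_vec::SegmentedVec;
///
/// // The first 2 elements live inline; later elements go to heap-allocated segments.
/// let mut vec: SegmentedVec<u8, 2> = SegmentedVec::new();
/// vec.extend([1, 2, 3]);
/// assert_eq!(vec.len(), 3);
/// assert_eq!(vec[2], 3);
/// ```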
46pub struct SegmentedVec<T, const PREALLOC: usize = 0> {
47    prealloc_segment: MaybeUninit<[T; PREALLOC]>,
48    dynamic_segments: Vec<NonNull<T>>,
49    len: usize,
50    /// Cached pointer to the next write position (for fast push)
51    write_ptr: *mut T,
52    /// Number of elements remaining in the current segment
53    segment_remaining: usize,
54    _marker: PhantomData<T>,
55}
56
57// Safety: SegmentedVec is Send if T is Send
58unsafe impl<T: Send, const PREALLOC: usize> Send for SegmentedVec<T, PREALLOC> {}
59// Safety: SegmentedVec is Sync if T is Sync
60unsafe impl<T: Sync, const PREALLOC: usize> Sync for SegmentedVec<T, PREALLOC> {}
61
62impl<T, const PREALLOC: usize> SegmentedVec<T, PREALLOC> {
63    const PREALLOC_EXP: u32 = if PREALLOC == 0 {
64        0
65    } else {
66        assert!(
67            PREALLOC.is_power_of_two(),
68            "PREALLOC must be 0 or a power of 2"
69        );
70        PREALLOC.trailing_zeros()
71    };
72
73    /// Minimum capacity for the first dynamic segment when PREALLOC=0.
74    /// Avoids tiny allocations that heap allocators round up anyway.
75    /// - 8 for 1-byte elements (allocators round up small requests)
76    /// - 4 for moderate elements (<= 1 KiB)
77    /// - 1 for large elements (avoid wasting space)
78    const MIN_NON_ZERO_CAP: usize = {
79        let size = std::mem::size_of::<T>();
80        if size == 1 {
81            8
82        } else if size <= 1024 {
83            4
84        } else {
85            1
86        }
87    };
88
89    const MIN_CAP_EXP: u32 = Self::MIN_NON_ZERO_CAP.trailing_zeros();
90
91    const BIAS: usize = if PREALLOC > 0 {
92        PREALLOC
93    } else {
94        Self::MIN_NON_ZERO_CAP
95    };
96
97    /// Offset to subtract from MSB to get shelf index.
98    const SHELF_OFFSET: u32 = if PREALLOC == 0 {
99        Self::MIN_CAP_EXP
100    } else {
101        Self::PREALLOC_EXP + 1
102    };
103
104    /// Creates a new empty `SegmentedVec`.
105    ///
106    /// # Example
107    ///
108    /// ```
109    /// use segmented_vec::SegmentedVec;
110    /// let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
111    /// assert!(vec.is_empty());
112    /// ```
113    #[inline]
114    pub const fn new() -> Self {
115        Self {
116            prealloc_segment: MaybeUninit::uninit(),
117            dynamic_segments: Vec::new(),
118            len: 0,
119            write_ptr: std::ptr::null_mut(),
120            segment_remaining: 0,
121            _marker: PhantomData,
122        }
123    }
124
125    /// Initialize or update the write pointer cache.
126    #[inline]
127    fn init_write_ptr(&mut self) {
128        if PREALLOC > 0 && self.len < PREALLOC {
129            self.write_ptr = unsafe {
130                (*self.prealloc_segment.as_mut_ptr())
131                    .as_mut_ptr()
132                    .add(self.len)
133            };
134            self.segment_remaining = PREALLOC - self.len;
135        } else if !self.dynamic_segments.is_empty() {
136            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
137            self.write_ptr = unsafe { self.dynamic_segments[shelf].as_ptr().add(box_idx) };
138            self.segment_remaining = remaining;
139        } else if PREALLOC > 0 {
140            self.write_ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
141            self.segment_remaining = PREALLOC;
142        } else {
143            self.write_ptr = std::ptr::null_mut();
144            self.segment_remaining = 0;
145        }
146    }
147
148    /// Returns the number of elements in the vector.
149    #[inline]
150    pub const fn len(&self) -> usize {
151        self.len
152    }
153
154    /// Returns `true` if the vector contains no elements.
155    #[inline]
156    pub const fn is_empty(&self) -> bool {
157        self.len == 0
158    }
159
160    /// Returns the current capacity of the vector.
161    #[inline]
162    pub fn capacity(&self) -> usize {
163        Self::compute_capacity(self.dynamic_segments.len() as u32)
164    }
165
166    /// Appends an element to the back of the vector.
167    ///
168    /// # Panics
169    ///
170    /// Panics if allocation fails.
171    ///
172    /// # Example
173    ///
174    /// ```
175    /// use segmented_vec::SegmentedVec;
176    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
177    /// vec.push(1);
178    /// vec.push(2);
179    /// assert_eq!(vec.len(), 2);
180    /// ```
181    #[inline]
182    pub fn push(&mut self, value: T) {
183        // Fast path: we have space in the current segment
184        if self.segment_remaining > 0 {
185            unsafe {
186                std::ptr::write(self.write_ptr, value);
187                self.write_ptr = self.write_ptr.add(1);
188            }
189            self.segment_remaining -= 1;
190            self.len += 1;
191            return;
192        }
193
194        // Slow path: need to grow or move to next segment
195        self.push_slow(value);
196    }
197
198    #[cold]
199    #[inline(never)]
200    fn push_slow(&mut self, value: T) {
201        let new_len = self.len + 1;
202
203        // Special case: first push into prealloc segment
204        if PREALLOC > 0 && self.len == 0 {
205            unsafe {
206                let ptr = (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr();
207                std::ptr::write(ptr, value);
208                self.write_ptr = ptr.add(1);
209            }
210            self.segment_remaining = PREALLOC - 1;
211            self.len = 1;
212            return;
213        }
214
215        // We're at a segment boundary (box_index = 0), so biased = len + BIAS is a power of 2
216        // Calculate shelf directly without full location()
217        let biased = self.len + Self::BIAS;
218        let shelf = (biased.trailing_zeros() - Self::SHELF_OFFSET) as usize;
219        let shelf_size = Self::shelf_size(shelf as u32);
220
221        self.grow_capacity(new_len);
222
223        unsafe {
224            let ptr = self.dynamic_segments.get_unchecked(shelf).as_ptr();
225            std::ptr::write(ptr, value);
226            self.write_ptr = ptr.add(1);
227        }
228        self.segment_remaining = shelf_size - 1;
229        self.len = new_len;
230    }
231
232    /// Removes the last element from the vector and returns it, or `None` if empty.
233    ///
234    /// # Example
235    ///
236    /// ```
237    /// use segmented_vec::SegmentedVec;
238    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
239    /// vec.push(1);
240    /// vec.push(2);
241    /// assert_eq!(vec.pop(), Some(2));
242    /// assert_eq!(vec.pop(), Some(1));
243    /// assert_eq!(vec.pop(), None);
244    /// ```
245    #[inline]
246    pub fn pop(&mut self) -> Option<T> {
247        if self.len == 0 {
248            return None;
249        }
250        self.len -= 1;
251        // Update write pointer cache - go back one position
252        if self.segment_remaining < Self::current_segment_size(self.len) {
253            unsafe {
254                self.write_ptr = self.write_ptr.sub(1);
255            }
256            self.segment_remaining += 1;
257            Some(unsafe { std::ptr::read(self.write_ptr) })
258        } else {
259            // We crossed a segment boundary, recalculate
260            self.init_write_ptr();
261            Some(unsafe { self.unchecked_read(self.len) })
262        }
263    }
264
265    /// Get the size of the segment containing the given index.
266    #[inline]
267    fn current_segment_size(index: usize) -> usize {
268        // Calculates the value of the most significant bit of (index + bias).
269        1 << (index + Self::BIAS).ilog2()
270    }
271
272    /// Returns a reference to the element at the given index.
273    ///
274    /// Returns `None` if the index is out of bounds.
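    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([10, 20, 30]);
    /// assert_eq!(vec.get(1), Some(&20));
    /// assert_eq!(vec.get(3), None);
    /// ```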
275    #[inline]
276    pub fn get(&self, index: usize) -> Option<&T> {
277        if index < self.len {
278            Some(unsafe { self.unchecked_at(index) })
279        } else {
280            None
281        }
282    }
283
284    /// Returns a mutable reference to the element at the given index.
285    ///
286    /// Returns `None` if the index is out of bounds.
287    #[inline]
288    pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
289        if index < self.len {
290            Some(unsafe { self.unchecked_at_mut(index) })
291        } else {
292            None
293        }
294    }
295
296    /// Returns a reference to the first element, or `None` if empty.
297    #[inline]
298    pub fn first(&self) -> Option<&T> {
299        self.get(0)
300    }
301
302    /// Returns a mutable reference to the first element, or `None` if empty.
303    #[inline]
304    pub fn first_mut(&mut self) -> Option<&mut T> {
305        self.get_mut(0)
306    }
307
308    /// Returns a reference to the last element, or `None` if empty.
309    #[inline]
310    pub fn last(&self) -> Option<&T> {
311        if self.len == 0 {
312            None
313        } else {
314            self.get(self.len - 1)
315        }
316    }
317
318    /// Returns a mutable reference to the last element, or `None` if empty.
319    #[inline]
320    pub fn last_mut(&mut self) -> Option<&mut T> {
321        if self.len == 0 {
322            None
323        } else {
324            self.get_mut(self.len - 1)
325        }
326    }
327
    /// Returns `true` if the vector contains an element with the given value.
329    #[inline]
330    pub fn contains(&self, x: &T) -> bool
331    where
332        T: PartialEq,
333    {
334        self.as_slice().contains(x)
335    }
336
337    /// Returns `true` if `needle` is a prefix of the vector.
338    pub fn starts_with(&self, needle: &[T]) -> bool
339    where
340        T: PartialEq,
341    {
342        self.as_slice().starts_with(needle)
343    }
344
345    /// Returns `true` if `needle` is a suffix of the vector.
346    pub fn ends_with(&self, needle: &[T]) -> bool
347    where
348        T: PartialEq,
349    {
350        self.as_slice().ends_with(needle)
351    }
352
353    /// Binary searches this vector for a given element.
354    ///
355    /// If the value is found, returns `Ok(index)`. If not found, returns
356    /// `Err(index)` where `index` is the position where the element could be inserted.
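    ///
    /// # Example
    ///
    /// A small sketch on an already-sorted vector (this delegates to the slice view):
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 3, 5, 7]);
    /// assert_eq!(vec.binary_search(&5), Ok(2));
    /// assert_eq!(vec.binary_search(&4), Err(2));
    /// ```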
357    pub fn binary_search(&self, x: &T) -> Result<usize, usize>
358    where
359        T: Ord,
360    {
361        self.as_slice().binary_search(x)
362    }
363
364    /// Binary searches this vector with a comparator function.
365    pub fn binary_search_by<F>(&self, f: F) -> Result<usize, usize>
366    where
367        F: FnMut(&T) -> Ordering,
368    {
369        self.as_slice().binary_search_by(f)
370    }
371
372    /// Binary searches this vector with a key extraction function.
373    pub fn binary_search_by_key<B, F>(&self, b: &B, f: F) -> Result<usize, usize>
374    where
375        F: FnMut(&T) -> B,
376        B: Ord,
377    {
378        self.as_slice().binary_search_by_key(b, f)
379    }
380
381    /// Swaps two elements in the vector.
382    ///
383    /// # Panics
384    ///
385    /// Panics if `a` or `b` are out of bounds.
386    #[inline]
387    pub fn swap(&mut self, a: usize, b: usize) {
388        self.as_mut_slice().swap(a, b)
389    }
390
391    /// Reverses the order of elements in the vector.
392    pub fn reverse(&mut self) {
393        self.as_mut_slice().reverse()
394    }
395
396    /// Fills the vector with the given value.
397    pub fn fill(&mut self, value: T)
398    where
399        T: Clone,
400    {
401        self.as_mut_slice().fill(value)
402    }
403
404    /// Fills the vector with values produced by a function.
405    pub fn fill_with<F>(&mut self, f: F)
406    where
407        F: FnMut() -> T,
408    {
409        self.as_mut_slice().fill_with(f)
410    }
411
412    /// Clears the vector, removing all elements.
413    ///
414    /// This does not deallocate the dynamic segments.
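    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2, 3]);
    /// vec.clear();
    /// assert!(vec.is_empty());
    /// ```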
415    pub fn clear(&mut self) {
416        if self.len == 0 {
417            return;
418        }
419
420        if std::mem::needs_drop::<T>() {
421            // Drop elements in prealloc segment
422            if PREALLOC > 0 {
423                let count = self.len.min(PREALLOC);
424                let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
425                unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
426            }
427
428            // Drop elements in dynamic segments
429            if self.len > PREALLOC {
430                let mut remaining = self.len - PREALLOC;
431                for shelf in 0..self.dynamic_segments.len() {
432                    let size = Self::shelf_size(shelf as u32);
433                    let count = remaining.min(size);
434                    let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
435                    unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
436                    remaining -= count;
437                    if remaining == 0 {
438                        break;
439                    }
440                }
441            }
442        }
443
444        self.len = 0;
445        // Reset write pointer cache
446        if PREALLOC > 0 {
447            self.write_ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
448            self.segment_remaining = PREALLOC;
449        } else {
450            self.write_ptr = std::ptr::null_mut();
451            self.segment_remaining = 0;
452        }
453    }
454
455    /// Shortens the vector, keeping the first `new_len` elements.
456    ///
457    /// If `new_len` is greater than or equal to the current length, this has no effect.
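    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend(0..10);
    /// vec.truncate(4);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![0, 1, 2, 3]);
    /// ```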
458    pub fn truncate(&mut self, new_len: usize) {
459        if new_len >= self.len {
460            return;
461        }
462
463        if std::mem::needs_drop::<T>() {
464            // Drop elements in prealloc segment (from new_len to min(self.len, PREALLOC))
465            if PREALLOC > 0 && new_len < PREALLOC {
466                let start = new_len;
467                let end = self.len.min(PREALLOC);
468                if start < end {
469                    let ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr().add(start) };
470                    unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, end - start)) };
471                }
472            }
473
474            // Drop elements in dynamic segments
475            if self.len > PREALLOC {
476                let dynamic_new_len = new_len.saturating_sub(PREALLOC);
477                let dynamic_old_len = self.len - PREALLOC;
478
479                if dynamic_new_len < dynamic_old_len {
480                    let mut pos = 0usize;
481                    for shelf in 0..self.dynamic_segments.len() {
482                        let size = Self::shelf_size(shelf as u32);
483                        let seg_end = pos + size;
484
485                        // Calculate overlap with [dynamic_new_len, dynamic_old_len)
486                        let drop_start = dynamic_new_len.max(pos);
487                        let drop_end = dynamic_old_len.min(seg_end);
488
489                        if drop_start < drop_end {
490                            let offset = drop_start - pos;
491                            let count = drop_end - drop_start;
492                            let ptr = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr().add(offset) };
493                            unsafe { std::ptr::drop_in_place(std::slice::from_raw_parts_mut(ptr, count)) };
494                        }
495
496                        pos = seg_end;
497                        if pos >= dynamic_old_len {
498                            break;
499                        }
500                    }
501                }
502            }
503        }
504
505        self.len = new_len;
506        // Update write pointer cache
507        if new_len > 0 {
508            self.init_write_ptr();
509        } else if PREALLOC > 0 {
510            self.write_ptr = unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
511            self.segment_remaining = PREALLOC;
512        } else {
513            self.write_ptr = std::ptr::null_mut();
514            self.segment_remaining = 0;
515        }
516    }
517
518    /// Reserves capacity for at least `additional` more elements.
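    ///
    /// # Example
    ///
    /// A small sketch of the "at least" guarantee:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.reserve(10);
    /// assert!(vec.capacity() >= 10);
    /// vec.push(1);
    /// assert_eq!(vec.len(), 1);
    /// ```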
519    pub fn reserve(&mut self, additional: usize) {
520        let old_capacity = self.capacity();
521        self.grow_capacity(self.len + additional);
522        // Initialize write pointer if we didn't have capacity before
523        if old_capacity == 0 && self.capacity() > 0 && self.segment_remaining == 0 {
524            self.init_write_ptr();
525        }
526    }
527
528    /// Shrinks the capacity to match the current length.
529    ///
530    /// This deallocates unused dynamic segments.
531    pub fn shrink_to_fit(&mut self) {
532        self.shrink_capacity(self.len);
533    }
534
535    /// Returns an iterator over references to the elements.
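    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2, 3]);
    /// assert_eq!(vec.iter().sum::<i32>(), 6);
    /// ```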
536    #[inline]
537    pub fn iter(&self) -> Iter<'_, T, PREALLOC> {
538        // Initialize with null pointers - first next() call will set up the segment
539        Iter {
540            vec: self,
541            ptr: std::ptr::null(),
542            segment_end: std::ptr::null(),
543            index: 0,
544            shelf_index: 0,
545        }
546    }
547
548    /// Returns an iterator over mutable references to the elements.
549    #[inline]
550    pub fn iter_mut(&mut self) -> IterMut<'_, T, PREALLOC> {
551        // Initialize with null pointers - first next() call will set up the segment
552        IterMut {
553            vec: self,
554            ptr: std::ptr::null_mut(),
555            segment_end: std::ptr::null_mut(),
556            index: 0,
557            shelf_index: 0,
558        }
559    }
560
561    /// Returns an immutable slice view of the entire vector.
562    ///
563    /// # Example
564    ///
565    /// ```
566    /// use segmented_vec::SegmentedVec;
567    ///
568    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
569    /// vec.extend(0..10);
570    ///
571    /// let slice = vec.as_slice();
572    /// assert_eq!(slice.len(), 10);
573    /// assert_eq!(slice[0], 0);
574    /// ```
575    #[inline]
576    pub fn as_slice(&self) -> SegmentedSlice<'_, T, PREALLOC> {
577        SegmentedSlice::new(self)
578    }
579
580    /// Returns a mutable slice view of the entire vector.
581    ///
582    /// # Example
583    ///
584    /// ```
585    /// use segmented_vec::SegmentedVec;
586    ///
587    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
588    /// vec.extend(0..10);
589    ///
590    /// let mut slice = vec.as_mut_slice();
591    /// slice[0] = 100;
592    /// assert_eq!(vec[0], 100);
593    /// ```
594    #[inline]
595    pub fn as_mut_slice(&mut self) -> SegmentedSliceMut<'_, T, PREALLOC> {
596        SegmentedSliceMut::new(self)
597    }
598
599    /// Returns an immutable slice view of a range of the vector.
600    ///
601    /// # Panics
602    ///
603    /// Panics if the range is out of bounds.
604    ///
605    /// # Example
606    ///
607    /// ```
608    /// use segmented_vec::SegmentedVec;
609    ///
610    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
611    /// vec.extend(0..10);
612    ///
613    /// let slice = vec.slice(2..5);
614    /// assert_eq!(slice.len(), 3);
615    /// assert_eq!(slice[0], 2);
616    /// ```
617    #[inline]
618    pub fn slice(&self, range: std::ops::Range<usize>) -> SegmentedSlice<'_, T, PREALLOC> {
619        assert!(range.start <= range.end && range.end <= self.len);
620        SegmentedSlice::from_range(self, range.start, range.end)
621    }
622
623    /// Returns a mutable slice view of a range of the vector.
624    ///
625    /// # Panics
626    ///
627    /// Panics if the range is out of bounds.
628    ///
629    /// # Example
630    ///
631    /// ```
632    /// use segmented_vec::SegmentedVec;
633    ///
634    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
635    /// vec.extend(0..10);
636    ///
637    /// let mut slice = vec.slice_mut(2..5);
638    /// slice[0] = 100;
639    /// assert_eq!(vec[2], 100);
640    /// ```
641    #[inline]
642    pub fn slice_mut(
643        &mut self,
644        range: std::ops::Range<usize>,
645    ) -> SegmentedSliceMut<'_, T, PREALLOC> {
646        assert!(range.start <= range.end && range.end <= self.len);
647        SegmentedSliceMut::from_range(self, range.start, range.end)
648    }
649
650    /// Extends the vector by cloning elements from a slice.
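    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend_from_slice(&[1, 2, 3, 4, 5]);
    /// assert_eq!(vec.len(), 5);
    /// assert_eq!(vec[4], 5);
    /// ```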
651    pub fn extend_from_slice(&mut self, other: &[T])
652    where
653        T: Clone,
654    {
655        if other.is_empty() {
656            return;
657        }
658        self.reserve(other.len());
659
660        let mut src = other;
661
662        // Fill prealloc segment if there's room
663        if PREALLOC > 0 && self.len < PREALLOC {
664            let prealloc_remaining = PREALLOC - self.len;
665            let to_copy = src.len().min(prealloc_remaining);
666            let dest = unsafe {
667                (*self.prealloc_segment.as_mut_ptr())
668                    .as_mut_ptr()
669                    .add(self.len)
670            };
671            for i in 0..to_copy {
672                unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
673            }
674            self.len += to_copy;
675            self.write_ptr = unsafe { dest.add(to_copy) };
676            self.segment_remaining = prealloc_remaining - to_copy;
677            src = &src[to_copy..];
678        }
679
        // Fill dynamic segments
        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            let dest = unsafe {
                self.dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx)
            };
            for i in 0..to_copy {
                unsafe { std::ptr::write(dest.add(i), src[i].clone()) };
            }
            self.len += to_copy;
            // Keep the cached write position in sync with the segment just filled.
            self.write_ptr = unsafe { dest.add(to_copy) };
            self.segment_remaining = remaining - to_copy;
            src = &src[to_copy..];
        }
697    }
698
699    /// Extends the vector by copying elements from a slice (for `Copy` types).
700    ///
701    /// This is more efficient than `extend_from_slice` for `Copy` types
702    /// as it uses bulk memory copy operations.
703    pub fn extend_from_copy_slice(&mut self, other: &[T])
704    where
705        T: Copy,
706    {
707        if other.is_empty() {
708            return;
709        }
710        self.reserve(other.len());
711
712        let mut src = other;
713
714        // Fill prealloc segment if there's room
715        if PREALLOC > 0 && self.len < PREALLOC {
716            let prealloc_remaining = PREALLOC - self.len;
717            let to_copy = src.len().min(prealloc_remaining);
718            unsafe {
719                let dest = (*self.prealloc_segment.as_mut_ptr())
720                    .as_mut_ptr()
721                    .add(self.len);
722                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
723                self.write_ptr = dest.add(to_copy);
724            };
725            self.len += to_copy;
726            self.segment_remaining = prealloc_remaining - to_copy;
727            src = &src[to_copy..];
728        }
729
        // Fill dynamic segments
        while !src.is_empty() {
            let (shelf, box_idx, remaining) = Self::location_with_capacity(self.len);
            let to_copy = src.len().min(remaining);
            unsafe {
                let dest = self
                    .dynamic_segments
                    .get_unchecked(shelf)
                    .as_ptr()
                    .add(box_idx);
                std::ptr::copy_nonoverlapping(src.as_ptr(), dest, to_copy);
                // Advance the cached write position within the segment just filled.
                self.write_ptr = dest.add(to_copy);
            };
            self.len += to_copy;
            self.segment_remaining = remaining - to_copy;
            src = &src[to_copy..];
        }
749    }
750
751    /// Sorts the vector in place using a stable sorting algorithm.
752    ///
753    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
754    ///
755    /// The algorithm is a merge sort adapted from the Rust standard library.
756    ///
757    /// # Examples
758    ///
759    /// ```
760    /// use segmented_vec::SegmentedVec;
761    ///
762    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
763    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
764    /// vec.sort();
765    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
766    /// ```
767    #[inline]
768    pub fn sort(&mut self)
769    where
770        T: Ord,
771    {
772        self.as_mut_slice().sort();
773    }
774
775    /// Sorts the vector in place with a comparator function.
776    ///
777    /// This sort is stable (i.e., does not reorder equal elements) and O(n * log(n)) worst-case.
778    ///
779    /// The comparator function must define a total ordering for the elements.
780    ///
781    /// # Examples
782    ///
783    /// ```
784    /// use segmented_vec::SegmentedVec;
785    ///
786    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
787    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
788    /// vec.sort_by(|a, b| b.cmp(a)); // reverse order
789    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
790    /// ```
791    pub fn sort_by<F>(&mut self, compare: F)
792    where
793        F: FnMut(&T, &T) -> Ordering,
794    {
795        self.as_mut_slice().sort_by(compare);
796    }
797
798    /// Sorts the vector in place with a key extraction function.
799    ///
800    /// This sort is stable (i.e., does not reorder equal elements) and O(m * n * log(n)) worst-case,
801    /// where the key function is O(m).
802    ///
803    /// # Examples
804    ///
805    /// ```
806    /// use segmented_vec::SegmentedVec;
807    ///
808    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
809    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
810    /// vec.sort_by_key(|k| k.abs());
811    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, -3, -4, 5, 6, -9]);
812    /// ```
813    #[inline]
814    pub fn sort_by_key<K, F>(&mut self, f: F)
815    where
816        F: FnMut(&T) -> K,
817        K: Ord,
818    {
819        self.as_mut_slice().sort_by_key(f);
820    }
821
822    /// Sorts the vector in place using an unstable sorting algorithm.
823    ///
824    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
825    ///
826    /// The algorithm is a quicksort with heapsort fallback, adapted from the Rust standard library.
827    ///
828    /// # Examples
829    ///
830    /// ```
831    /// use segmented_vec::SegmentedVec;
832    ///
833    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
834    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
835    /// vec.sort_unstable();
836    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 1, 2, 3, 4, 5, 6, 9]);
837    /// ```
838    #[inline]
839    pub fn sort_unstable(&mut self)
840    where
841        T: Ord,
842    {
843        self.as_mut_slice().sort_unstable();
844    }
845
846    /// Sorts the vector in place with a comparator function using an unstable sorting algorithm.
847    ///
848    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(n * log(n)) worst-case.
849    ///
850    /// The comparator function must define a total ordering for the elements.
851    ///
852    /// # Examples
853    ///
854    /// ```
855    /// use segmented_vec::SegmentedVec;
856    ///
857    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
858    /// vec.extend([3, 1, 4, 1, 5, 9, 2, 6]);
859    /// vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
860    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![9, 6, 5, 4, 3, 2, 1, 1]);
861    /// ```
862    pub fn sort_unstable_by<F>(&mut self, compare: F)
863    where
864        F: FnMut(&T, &T) -> Ordering,
865    {
866        self.as_mut_slice().sort_unstable_by(compare);
867    }
868
869    /// Sorts the vector in place with a key extraction function using an unstable sorting algorithm.
870    ///
    /// This sort is unstable (i.e., may reorder equal elements), in-place, and O(m * n * log(n)) worst-case,
    /// where the key function is O(m).
873    ///
874    /// # Examples
875    ///
876    /// ```
877    /// use segmented_vec::SegmentedVec;
878    ///
879    /// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
880    /// vec.extend([-3, 1, -4, 1, 5, -9, 2, 6]);
881    /// vec.sort_unstable_by_key(|k| k.abs());
882    /// // Note: unstable sort may reorder equal elements, so we just check it's sorted by abs value
883    /// let result: Vec<i32> = vec.iter().copied().collect();
884    /// for i in 1..result.len() {
885    ///     assert!(result[i-1].abs() <= result[i].abs());
886    /// }
887    /// ```
888    #[inline]
889    pub fn sort_unstable_by_key<K, F>(&mut self, f: F)
890    where
891        F: FnMut(&T) -> K,
892        K: Ord,
893    {
894        self.as_mut_slice().sort_unstable_by_key(f);
895    }
896
897    /// Checks if the elements of this vector are sorted.
898    pub fn is_sorted(&self) -> bool
899    where
900        T: PartialOrd,
901    {
902        self.as_slice().is_sorted()
903    }
904
905    /// Checks if the elements of this vector are sorted using the given comparator function.
906    pub fn is_sorted_by<F>(&self, compare: F) -> bool
907    where
908        F: FnMut(&T, &T) -> bool,
909    {
910        self.as_slice().is_sorted_by(compare)
911    }
912
913    /// Checks if the elements of this vector are sorted using the given key extraction function.
914    pub fn is_sorted_by_key<K, F>(&self, f: F) -> bool
915    where
916        F: FnMut(&T) -> K,
917        K: PartialOrd,
918    {
919        self.as_slice().is_sorted_by_key(f)
920    }
921
922    /// Returns the index of the partition point according to the given predicate.
923    pub fn partition_point<P>(&self, pred: P) -> usize
924    where
925        P: FnMut(&T) -> bool,
926    {
927        self.as_slice().partition_point(pred)
928    }
929
930    /// Rotates the vector in-place such that the first `mid` elements move to the end.
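    ///
    /// # Example
    ///
    /// A small usage sketch (this delegates to the slice view):
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2, 3, 4, 5]);
    /// vec.rotate_left(2);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![3, 4, 5, 1, 2]);
    /// ```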
931    pub fn rotate_left(&mut self, mid: usize) {
932        self.as_mut_slice().rotate_left(mid);
933    }
934
935    /// Rotates the vector in-place such that the last `k` elements move to the front.
936    pub fn rotate_right(&mut self, k: usize) {
937        self.as_mut_slice().rotate_right(k);
938    }
939
940    /// Creates a new `SegmentedVec` with at least the specified capacity.
941    pub fn with_capacity(capacity: usize) -> Self {
942        let mut vec = Self::new();
943        vec.reserve(capacity);
944        vec
945    }
946
947    /// Inserts an element at position `index`, shifting all elements after it to the right.
948    ///
949    /// # Panics
950    ///
951    /// Panics if `index > len`.
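    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2, 4]);
    /// vec.insert(2, 3);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
    /// ```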
952    pub fn insert(&mut self, index: usize, element: T) {
953        assert!(index <= self.len);
954        self.push(element);
955        // Rotate the new element into place
956        if index < self.len - 1 {
957            for i in (index..self.len - 1).rev() {
958                unsafe {
959                    let ptr_a = self.unchecked_at_mut(i) as *mut T;
960                    let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
961                    std::ptr::swap(ptr_a, ptr_b);
962                }
963            }
964        }
965    }
966
967    /// Removes and returns the element at position `index`, shifting all elements after it to the left.
968    ///
969    /// # Panics
970    ///
971    /// Panics if `index >= len`.
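    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2, 3]);
    /// assert_eq!(vec.remove(1), 2);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 3]);
    /// ```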
972    pub fn remove(&mut self, index: usize) -> T {
973        assert!(index < self.len);
974        // Shift elements left
975        for i in index..self.len - 1 {
976            unsafe {
977                let ptr_a = self.unchecked_at_mut(i) as *mut T;
978                let ptr_b = self.unchecked_at_mut(i + 1) as *mut T;
979                std::ptr::swap(ptr_a, ptr_b);
980            }
981        }
982        self.pop().unwrap()
983    }
984
985    /// Removes an element from the vector and returns it.
986    ///
987    /// The removed element is replaced by the last element of the vector.
988    /// This does not preserve ordering, but is O(1).
989    ///
990    /// # Panics
991    ///
992    /// Panics if `index >= len`.
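    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2, 3, 4]);
    /// assert_eq!(vec.swap_remove(0), 1);
    /// // The last element was moved into the vacated slot.
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![4, 2, 3]);
    /// ```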
993    pub fn swap_remove(&mut self, index: usize) -> T {
994        assert!(index < self.len);
995        let last_index = self.len - 1;
996        if index != last_index {
997            unsafe {
998                let ptr_a = self.unchecked_at_mut(index) as *mut T;
999                let ptr_b = self.unchecked_at_mut(last_index) as *mut T;
1000                std::ptr::swap(ptr_a, ptr_b);
1001            }
1002        }
1003        self.pop().unwrap()
1004    }
1005
1006    /// Retains only the elements specified by the predicate.
1007    ///
1008    /// Removes all elements for which `f(&element)` returns `false`.
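    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend(1..=6);
    /// vec.retain(|x| x % 2 == 0);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![2, 4, 6]);
    /// ```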
1009    pub fn retain<F>(&mut self, mut f: F)
1010    where
1011        F: FnMut(&T) -> bool,
1012    {
1013        let mut i = 0;
1014        while i < self.len {
1015            if f(unsafe { self.unchecked_at(i) }) {
1016                i += 1;
1017            } else {
1018                self.remove(i);
1019            }
1020        }
1021    }
1022
1023    /// Retains only the elements specified by the predicate, passing a mutable reference.
1024    pub fn retain_mut<F>(&mut self, mut f: F)
1025    where
1026        F: FnMut(&mut T) -> bool,
1027    {
1028        let mut i = 0;
1029        while i < self.len {
1030            if f(unsafe { self.unchecked_at_mut(i) }) {
1031                i += 1;
1032            } else {
1033                self.remove(i);
1034            }
1035        }
1036    }
1037
1038    /// Removes consecutive duplicate elements.
1039    ///
1040    /// If the vector is sorted, this removes all duplicates.
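    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 1, 2, 2, 2, 3]);
    /// vec.dedup();
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 2, 3]);
    /// ```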
1041    pub fn dedup(&mut self)
1042    where
1043        T: PartialEq,
1044    {
1045        self.dedup_by(|a, b| a == b);
1046    }
1047
1048    /// Removes consecutive elements that satisfy the given equality relation.
1049    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
1050    where
1051        F: FnMut(&mut T, &mut T) -> bool,
1052    {
1053        if self.len <= 1 {
1054            return;
1055        }
1056        let mut write = 1;
1057        for read in 1..self.len {
1058            let should_keep = unsafe {
1059                let prev_ptr = self.unchecked_at_mut(write - 1) as *mut T;
1060                let curr_ptr = self.unchecked_at_mut(read) as *mut T;
1061                !same_bucket(&mut *prev_ptr, &mut *curr_ptr)
1062            };
1063            if should_keep {
1064                if read != write {
1065                    unsafe {
1066                        let ptr_src = self.unchecked_at_mut(read) as *mut T;
1067                        let ptr_dst = self.unchecked_at_mut(write) as *mut T;
1068                        std::ptr::swap(ptr_dst, ptr_src);
1069                    }
1070                }
1071                write += 1;
1072            } else {
1073                // Drop the duplicate
1074                unsafe {
1075                    std::ptr::drop_in_place(self.unchecked_at_mut(read));
1076                }
1077            }
1078        }
        if write < self.len {
            self.len = write;
            // Elements were removed; refresh the cached write position.
            self.init_write_ptr();
        }
1080    }
1081
1082    /// Removes consecutive elements that map to the same key.
1083    pub fn dedup_by_key<K, F>(&mut self, mut key: F)
1084    where
1085        F: FnMut(&mut T) -> K,
1086        K: PartialEq,
1087    {
1088        self.dedup_by(|a, b| key(a) == key(b));
1089    }
1090
1091    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1092    ///
1093    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1094    /// with each additional slot filled with `value`.
1095    /// If `new_len` is less than `len`, the vector is simply truncated.
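    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2]);
    /// vec.resize(4, 0);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 2, 0, 0]);
    /// vec.resize(1, 0);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1]);
    /// ```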
1096    pub fn resize(&mut self, new_len: usize, value: T)
1097    where
1098        T: Clone,
1099    {
1100        if new_len > self.len {
1101            self.reserve(new_len - self.len);
1102            while self.len < new_len {
1103                self.push(value.clone());
1104            }
1105        } else {
1106            self.truncate(new_len);
1107        }
1108    }
1109
1110    /// Resizes the vector in-place so that `len` is equal to `new_len`.
1111    ///
1112    /// If `new_len` is greater than `len`, the vector is extended by the difference,
1113    /// with each additional slot filled with the result of calling the closure `f`.
1114    pub fn resize_with<F>(&mut self, new_len: usize, mut f: F)
1115    where
1116        F: FnMut() -> T,
1117    {
1118        if new_len > self.len {
1119            self.reserve(new_len - self.len);
1120            while self.len < new_len {
1121                self.push(f());
1122            }
1123        } else {
1124            self.truncate(new_len);
1125        }
1126    }
1127
1128    /// Moves all the elements of `other` into `self`, leaving `other` empty.
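    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut a: SegmentedVec<i32> = SegmentedVec::new();
    /// a.extend([1, 2]);
    /// let mut b: SegmentedVec<i32> = SegmentedVec::new();
    /// b.extend([3, 4]);
    /// a.append(&mut b);
    /// assert_eq!(a.iter().copied().collect::<Vec<_>>(), vec![1, 2, 3, 4]);
    /// assert!(b.is_empty());
    /// ```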
1129    pub fn append(&mut self, other: &mut Self) {
1130        let other_len = other.len;
1131        self.reserve(other_len);
1132        let start = self.len;
1133        while let Some(item) = other.pop() {
1134            self.push(item);
1135        }
1136        // Reverse the appended portion since pop() returns in reverse order
1137        let mut left = start;
1138        let mut right = self.len;
1139        while left < right {
1140            right -= 1;
1141            if left < right {
1142                unsafe {
1143                    let ptr_a = self.unchecked_at_mut(left) as *mut T;
1144                    let ptr_b = self.unchecked_at_mut(right) as *mut T;
1145                    std::ptr::swap(ptr_a, ptr_b);
1146                }
1147                left += 1;
1148            }
1149        }
1150    }
1151
1152    /// Splits the vector into two at the given index.
1153    ///
1154    /// Returns a newly allocated vector containing the elements in the range `[at, len)`.
1155    /// After the call, the original vector will contain elements `[0, at)`.
1156    ///
1157    /// # Panics
1158    ///
1159    /// Panics if `at > len`.
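    ///
    /// # Example
    ///
    /// A small usage sketch:
    ///
    /// ```
    /// use segmented_vec::SegmentedVec;
    ///
    /// let mut vec: SegmentedVec<i32> = SegmentedVec::new();
    /// vec.extend([1, 2, 3, 4, 5]);
    /// let tail = vec.split_off(3);
    /// assert_eq!(vec.iter().copied().collect::<Vec<_>>(), vec![1, 2, 3]);
    /// assert_eq!(tail.iter().copied().collect::<Vec<_>>(), vec![4, 5]);
    /// ```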
1160    pub fn split_off(&mut self, at: usize) -> Self {
1161        assert!(at <= self.len);
1162        let mut other = Self::new();
1163        other.reserve(self.len - at);
1164        for i in at..self.len {
1165            other.push(unsafe { self.unchecked_read(i) });
1166        }
        let removed_any = at < self.len;
        self.len = at;
        if removed_any {
            // The cached write position now points past the new end; refresh it.
            self.init_write_ptr();
        }
1168        other
1169    }
1170
1171    /// Returns an iterator over `chunk_size` elements of the vector at a time.
1172    ///
1173    /// # Panics
1174    ///
1175    /// Panics if `chunk_size` is 0.
1176    pub fn chunks(&self, chunk_size: usize) -> Chunks<'_, T, PREALLOC> {
1177        self.as_slice().chunks(chunk_size)
1178    }
1179
1180    /// Returns an iterator over all contiguous windows of length `size`.
1181    ///
1182    /// # Panics
1183    ///
1184    /// Panics if `size` is 0.
1185    pub fn windows(&self, size: usize) -> Windows<'_, T, PREALLOC> {
1186        self.as_slice().windows(size)
1187    }
1188
1189    /// Returns an iterator over `chunk_size` elements at a time, starting from the end.
1190    ///
1191    /// # Panics
1192    ///
1193    /// Panics if `chunk_size` is 0.
1194    pub fn rchunks(&self, chunk_size: usize) -> RChunks<'_, T, PREALLOC> {
1195        self.as_slice().rchunks(chunk_size)
1196    }
1197
1198    /// Returns an iterator over exactly `chunk_size` elements at a time.
1199    ///
1200    /// # Panics
1201    ///
1202    /// Panics if `chunk_size` is 0.
1203    pub fn chunks_exact(&self, chunk_size: usize) -> ChunksExact<'_, T, PREALLOC> {
1204        self.as_slice().chunks_exact(chunk_size)
1205    }
1206
1207    /// Creates a draining iterator that removes the specified range and yields the removed items.
1208    ///
1209    /// # Panics
1210    ///
1211    /// Panics if the starting point is greater than the end point or if the end point is greater than the length.
1212    pub fn drain(&mut self, range: std::ops::Range<usize>) -> Drain<'_, T, PREALLOC> {
1213        assert!(range.start <= range.end && range.end <= self.len);
1214        Drain {
1215            vec: self,
1216            range_start: range.start,
1217            range_end: range.end,
1218            index: range.start,
1219        }
1220    }
1221
1222    /// Copies the elements to a new `Vec<T>`.
1223    pub fn to_vec(&self) -> Vec<T>
1224    where
1225        T: Clone,
1226    {
1227        self.as_slice().to_vec()
1228    }
1229
1230    // --- Internal helper methods ---
1231
    /// Calculate the number of dynamic shelves needed to hold `box_count`
    /// elements in total (counting any elements in the inline preallocation).
1233    #[inline]
1234    fn shelf_count(box_count: usize) -> u32 {
1235        if box_count == 0 {
1236            return 0;
1237        }
1238        if PREALLOC == 0 {
1239            let val = box_count + Self::MIN_NON_ZERO_CAP;
1240            val.next_power_of_two().trailing_zeros() - Self::MIN_CAP_EXP
1241        } else {
1242            let val = box_count + PREALLOC;
1243            val.next_power_of_two().trailing_zeros() - Self::PREALLOC_EXP - 1
1244        }
1245    }
1246
1247    /// Calculate the size of a shelf at a given index.
1248    #[inline]
1249    fn shelf_size(shelf_index: u32) -> usize {
1250        if PREALLOC == 0 {
1251            Self::MIN_NON_ZERO_CAP << shelf_index
1252        } else {
1253            1usize << (shelf_index + Self::PREALLOC_EXP + 1)
1254        }
1255    }
1256
1257    /// Calculate which shelf and box index a list index falls into.
1258    /// Returns (shelf_index, box_index).
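    ///
    /// Worked example (illustrative numbers only): with `PREALLOC = 4`, `BIAS`
    /// is 4 and `SHELF_OFFSET` is 3, so indices 0..=3 live in the inline
    /// segment; index 4 gives biased = 8, msb = 3, shelf 0, box 0; index 11
    /// gives biased = 15, msb = 3, shelf 0, box 7 (shelf 0 holds 8 slots);
    /// index 12 gives biased = 16, msb = 4, shelf 1, box 0 (shelf 1 holds 16).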
1259    #[inline]
1260    fn location(list_index: usize) -> (usize, usize) {
1261        let biased = list_index + Self::BIAS;
1262        let msb = biased.ilog2();
1263        let shelf = msb - Self::SHELF_OFFSET;
1264        // Clear the most significant bit to get box_index
1265        let box_idx = biased ^ (1usize << msb);
1266        (shelf as usize, box_idx)
1267    }
1268
1269    /// Calculate shelf, box index, and remaining capacity in one go.
1270    /// Returns (shelf_index, box_index, segment_remaining).
1271    #[inline]
1272    fn location_with_capacity(list_index: usize) -> (usize, usize, usize) {
1273        let biased = list_index + Self::BIAS;
1274        let msb = biased.ilog2();
1275        let shelf = msb - Self::SHELF_OFFSET;
1276        let box_idx = biased ^ (1usize << msb);
1277        // segment_remaining = shelf_size - box_idx = (1 << msb) - box_idx
1278        //                   = (1 << msb) - (biased - (1 << msb))
1279        //                   = (2 << msb) - biased
1280        let segment_remaining = (2usize << msb) - biased;
1281        (shelf as usize, box_idx, segment_remaining)
1282    }
1283
1284    /// Get an unchecked reference to an element.
1285    ///
1286    /// # Safety
1287    ///
1288    /// `index` must be less than `self.len`.
1289    #[inline]
1290    pub(crate) unsafe fn unchecked_at(&self, index: usize) -> &T {
1291        unsafe {
1292            if index < PREALLOC {
1293                &(*self.prealloc_segment.as_ptr())[index]
1294            } else {
1295                let (shelf, box_idx) = Self::location(index);
1296                &*self
1297                    .dynamic_segments
1298                    .get_unchecked(shelf)
1299                    .as_ptr()
1300                    .add(box_idx)
1301            }
1302        }
1303    }
1304
1305    /// Get an unchecked mutable reference to an element.
1306    ///
1307    /// # Safety
1308    ///
1309    /// `index` must be less than `self.len`.
1310    #[inline]
1311    pub(crate) unsafe fn unchecked_at_mut(&mut self, index: usize) -> &mut T {
1312        unsafe {
1313            if index < PREALLOC {
1314                &mut (*self.prealloc_segment.as_mut_ptr())[index]
1315            } else {
1316                let (shelf, box_idx) = Self::location(index);
1317                &mut *self
1318                    .dynamic_segments
1319                    .get_unchecked(shelf)
1320                    .as_ptr()
1321                    .add(box_idx)
1322            }
1323        }
1324    }
1325
1326    /// Read a value from an index.
1327    ///
1328    /// # Safety
1329    ///
1330    /// `index` must be less than `self.len`, and the value must not be read again.
1331    #[inline]
1332    unsafe fn unchecked_read(&self, index: usize) -> T {
1333        unsafe {
1334            if index < PREALLOC {
1335                std::ptr::read(&(*self.prealloc_segment.as_ptr())[index])
1336            } else {
1337                let (shelf, box_idx) = Self::location(index);
1338                std::ptr::read(
1339                    self.dynamic_segments
1340                        .get_unchecked(shelf)
1341                        .as_ptr()
1342                        .add(box_idx),
1343                )
1344            }
1345        }
1346    }
1347
1348    /// Grow capacity to accommodate at least `new_capacity` elements.
1349    fn grow_capacity(&mut self, new_capacity: usize) {
1350        let new_shelf_count = Self::shelf_count(new_capacity);
1351        let old_shelf_count = self.dynamic_segments.len() as u32;
1352
1353        if new_shelf_count > old_shelf_count {
1354            self.dynamic_segments
1355                .reserve((new_shelf_count - old_shelf_count) as usize);
1356
1357            for i in old_shelf_count..new_shelf_count {
1358                let size = Self::shelf_size(i);
1359                let layout = Layout::array::<T>(size).expect("Layout overflow");
1360                let ptr = if layout.size() == 0 {
1361                    NonNull::dangling()
1362                } else {
1363                    let ptr = unsafe { alloc::alloc(layout) };
1364                    NonNull::new(ptr as *mut T).expect("Allocation failed")
1365                };
1366                self.dynamic_segments.push(ptr);
1367            }
1368        }
1369    }
1370
1371    /// Compute total capacity given the number of dynamic shelves.
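    ///
    /// For example (illustrative numbers only): with `PREALLOC = 0` and a
    /// 4-byte element type, `MIN_NON_ZERO_CAP` is 4, the shelves hold
    /// 4, 8, 16, ... elements, and three shelves give (4 << 3) - 4 = 28.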
1372    #[inline]
1373    fn compute_capacity(shelf_count: u32) -> usize {
1374        if shelf_count == 0 {
1375            PREALLOC
1376        } else if PREALLOC == 0 {
1377            (Self::MIN_NON_ZERO_CAP << shelf_count) - Self::MIN_NON_ZERO_CAP
1378        } else {
1379            (1usize << (Self::PREALLOC_EXP + 1 + shelf_count)) - PREALLOC
1380        }
1381    }
1382
    /// Shrink capacity to the minimum needed for `new_capacity` elements.
    fn shrink_capacity(&mut self, new_capacity: usize) {
        let old_shelf_count = self.dynamic_segments.len() as u32;
        let new_shelf_count = if new_capacity <= PREALLOC {
            0
        } else {
            Self::shelf_count(new_capacity)
        };

        if new_shelf_count >= old_shelf_count {
            return;
        }

        self.free_shelves(old_shelf_count, new_shelf_count);
        self.dynamic_segments.truncate(new_shelf_count as usize);
        self.dynamic_segments.shrink_to_fit();

        // The cached write position may have pointed into a freed shelf; refresh it.
        if self.len < self.capacity() {
            self.init_write_ptr();
        } else if let Some(last) = self.dynamic_segments.last().copied() {
            // Exactly full: park the cache one past the last element so that
            // `pop` and the slow push path keep working.
            let last_shelf = self.dynamic_segments.len() as u32 - 1;
            self.write_ptr = unsafe { last.as_ptr().add(Self::shelf_size(last_shelf)) };
            self.segment_remaining = 0;
        } else if PREALLOC > 0 {
            // Exactly full and everything fits in the inline preallocation.
            self.write_ptr =
                unsafe { (*self.prealloc_segment.as_mut_ptr()).as_mut_ptr().add(PREALLOC) };
            self.segment_remaining = 0;
        } else {
            self.write_ptr = std::ptr::null_mut();
            self.segment_remaining = 0;
        }
    }
1402
    /// Free the shelves with indices in `to_count..from_count`, highest index first.
1404    fn free_shelves(&mut self, from_count: u32, to_count: u32) {
1405        for i in (to_count..from_count).rev() {
1406            let size = Self::shelf_size(i);
1407            let layout = Layout::array::<T>(size).expect("Layout overflow");
1408            if layout.size() > 0 {
1409                unsafe {
1410                    alloc::dealloc(
1411                        self.dynamic_segments[i as usize].as_ptr() as *mut u8,
1412                        layout,
1413                    );
1414                }
1415            }
1416        }
1417    }
1418}
1419
1420impl<T, const PREALLOC: usize> Drop for SegmentedVec<T, PREALLOC> {
1421    fn drop(&mut self) {
1422        // Drop all elements
1423        self.clear();
1424        // Free all dynamic segments
1425        self.free_shelves(self.dynamic_segments.len() as u32, 0);
1426    }
1427}
1428
1429impl<T, const PREALLOC: usize> sort::IndexedAccess<T> for SegmentedVec<T, PREALLOC> {
1430    #[inline]
1431    fn get_ref(&self, index: usize) -> &T {
1432        debug_assert!(index < self.len);
1433        unsafe { self.unchecked_at(index) }
1434    }
1435
1436    #[inline]
1437    fn get_ptr(&self, index: usize) -> *const T {
1438        debug_assert!(index < self.len);
1439        if index < PREALLOC {
1440            unsafe { (*self.prealloc_segment.as_ptr()).as_ptr().add(index) }
1441        } else {
1442            let (shelf, box_idx) = Self::location(index);
1443            unsafe {
1444                self.dynamic_segments
1445                    .get_unchecked(shelf)
1446                    .as_ptr()
1447                    .add(box_idx)
1448            }
1449        }
1450    }
1451
1452    #[inline]
1453    fn get_ptr_mut(&mut self, index: usize) -> *mut T {
1454        debug_assert!(index < self.len);
1455        if index < PREALLOC {
1456            unsafe {
1457                (*self.prealloc_segment.as_mut_ptr())
1458                    .as_mut_ptr()
1459                    .add(index)
1460            }
1461        } else {
1462            let (shelf, box_idx) = Self::location(index);
1463            unsafe {
1464                self.dynamic_segments
1465                    .get_unchecked(shelf)
1466                    .as_ptr()
1467                    .add(box_idx)
1468            }
1469        }
1470    }
1471
1472    #[inline]
1473    fn swap(&mut self, a: usize, b: usize) {
1474        if a == b {
1475            return;
1476        }
1477        debug_assert!(a < self.len && b < self.len);
1478        unsafe {
1479            let ptr_a = self.get_ptr_mut(a);
1480            let ptr_b = self.get_ptr_mut(b);
1481            std::ptr::swap(ptr_a, ptr_b);
1482        }
1483    }
1484}
1485
1486impl<T, const PREALLOC: usize> Default for SegmentedVec<T, PREALLOC> {
1487    fn default() -> Self {
1488        Self::new()
1489    }
1490}
1491
1492impl<T, const PREALLOC: usize> Index<usize> for SegmentedVec<T, PREALLOC> {
1493    type Output = T;
1494
1495    fn index(&self, index: usize) -> &Self::Output {
1496        self.get(index).expect("index out of bounds")
1497    }
1498}
1499
1500impl<T, const PREALLOC: usize> IndexMut<usize> for SegmentedVec<T, PREALLOC> {
1501    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
1502        self.get_mut(index).expect("index out of bounds")
1503    }
1504}
1505
1506impl<T: Clone, const PREALLOC: usize> Clone for SegmentedVec<T, PREALLOC> {
1507    fn clone(&self) -> Self {
1508        if self.len == 0 {
1509            return Self::new();
1510        }
1511
1512        let mut new_vec = Self::new();
1513        new_vec.reserve(self.len);
1514
1515        // Clone prealloc segment
1516        if PREALLOC > 0 {
1517            let count = self.len.min(PREALLOC);
1518            let src = unsafe { (*self.prealloc_segment.as_ptr()).as_ptr() };
1519            let dst = unsafe { (*new_vec.prealloc_segment.as_mut_ptr()).as_mut_ptr() };
1520            for i in 0..count {
1521                unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1522            }
1523            new_vec.len = count;
1524        }
1525
1526        // Clone dynamic segments
1527        if self.len > PREALLOC {
1528            let mut remaining = self.len - PREALLOC;
1529            for shelf in 0..self.dynamic_segments.len() {
1530                let size = Self::shelf_size(shelf as u32);
1531                let count = remaining.min(size);
1532                let src = unsafe { self.dynamic_segments.get_unchecked(shelf).as_ptr() };
1533                let dst = unsafe { new_vec.dynamic_segments.get_unchecked(shelf).as_ptr() };
1534                for i in 0..count {
1535                    unsafe { std::ptr::write(dst.add(i), (*src.add(i)).clone()) };
1536                }
1537                new_vec.len += count;
1538                remaining -= count;
1539                if remaining == 0 {
1540                    break;
1541                }
1542            }
1543        }
1544
1545        // Set up write pointer
1546        if new_vec.len < new_vec.capacity() {
1547            new_vec.init_write_ptr();
1548        } else {
1549            new_vec.write_ptr = std::ptr::null_mut();
1550            new_vec.segment_remaining = 0;
1551        }
1552
1553        new_vec
1554    }
1555}
1556
1557impl<T: std::fmt::Debug, const PREALLOC: usize> std::fmt::Debug for SegmentedVec<T, PREALLOC> {
1558    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
1559        f.debug_list().entries(self.iter()).finish()
1560    }
1561}
1562
1563impl<T: PartialEq, const PREALLOC: usize> PartialEq for SegmentedVec<T, PREALLOC> {
1564    fn eq(&self, other: &Self) -> bool {
1565        if self.len != other.len {
1566            return false;
1567        }
1568        for i in 0..self.len {
1569            if unsafe { self.unchecked_at(i) } != unsafe { other.unchecked_at(i) } {
1570                return false;
1571            }
1572        }
1573        true
1574    }
1575}
1576
1577impl<T: Eq, const PREALLOC: usize> Eq for SegmentedVec<T, PREALLOC> {}
1578
1579impl<T, const PREALLOC: usize> FromIterator<T> for SegmentedVec<T, PREALLOC> {
1580    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
1581        let iter = iter.into_iter();
1582        let (lower, _) = iter.size_hint();
1583        let mut vec = Self::new();
1584        vec.reserve(lower);
1585        for item in iter {
1586            vec.push(item);
1587        }
1588        vec
1589    }
1590}
1591
1592impl<T, const PREALLOC: usize> Extend<T> for SegmentedVec<T, PREALLOC> {
1593    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1594        let iter = iter.into_iter();
1595        let (lower, _) = iter.size_hint();
1596        self.reserve(lower);
1597        for item in iter {
1598            self.push(item);
1599        }
1600    }
1601}
1602
1603// --- Iterator implementations ---
1604
1605/// An iterator over references to elements of a `SegmentedVec`.
1606pub struct Iter<'a, T, const PREALLOC: usize> {
1607    vec: &'a SegmentedVec<T, PREALLOC>,
1608    /// Current pointer within segment
1609    ptr: *const T,
1610    /// End of current segment (min of segment capacity and vec.len)
1611    segment_end: *const T,
1612    /// Current logical index
1613    index: usize,
1614    /// Current shelf index (for dynamic segments)
1615    shelf_index: u32,
1616}
1617
1618impl<'a, T, const PREALLOC: usize> Iterator for Iter<'a, T, PREALLOC> {
1619    type Item = &'a T;
1620
1621    #[inline]
1622    fn next(&mut self) -> Option<Self::Item> {
1623        if self.ptr < self.segment_end {
1624            let result = unsafe { &*self.ptr };
1625            self.ptr = unsafe { self.ptr.add(1) };
1626            self.index += 1;
1627            return Some(result);
1628        }
1629        self.next_segment()
1630    }
1631
1632    #[inline]
1633    fn size_hint(&self) -> (usize, Option<usize>) {
1634        let remaining = self.vec.len.saturating_sub(self.index);
1635        (remaining, Some(remaining))
1636    }
1637}
1638
1639impl<'a, T, const PREALLOC: usize> Iter<'a, T, PREALLOC> {
1640    #[inline]
1641    fn next_segment(&mut self) -> Option<&'a T> {
1642        if self.index >= self.vec.len {
1643            return None;
1644        }
1645
1646        // Move to next segment
1647        if self.index < PREALLOC {
1648            // In prealloc segment
1649            let ptr = unsafe {
1650                (*self.vec.prealloc_segment.as_ptr())
1651                    .as_ptr()
1652                    .add(self.index)
1653            };
1654            let remaining = PREALLOC - self.index;
1655            let segment_len = remaining.min(self.vec.len - self.index);
1656            self.ptr = ptr;
1657            self.segment_end = unsafe { ptr.add(segment_len) };
1658        } else {
1659            // In dynamic segments
1660            let shelf_idx = self.shelf_index as usize;
1661            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
1662            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
1663            let segment_len = shelf_size.min(self.vec.len - self.index);
1664            self.ptr = ptr;
1665            self.segment_end = unsafe { ptr.add(segment_len) };
1666            self.shelf_index += 1;
1667        }
1668
1669        let result = unsafe { &*self.ptr };
1670        self.ptr = unsafe { self.ptr.add(1) };
1671        self.index += 1;
1672        Some(result)
1673    }
1674}
1675
1676impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Iter<'a, T, PREALLOC> {}
1677
1678/// An iterator over mutable references to elements of a `SegmentedVec`.
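///
/// Created by `SegmentedVec::iter_mut`.
///
/// # Example
///
/// ```
/// use segmented_vec::SegmentedVec;
///
/// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
/// vec.extend(0..6);
///
/// // Mutate elements in place, including across the prealloc/dynamic boundary.
/// for x in vec.iter_mut() {
///     *x *= 10;
/// }
/// assert_eq!(vec[5], 50);
/// ```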
1679pub struct IterMut<'a, T, const PREALLOC: usize> {
1680    vec: &'a mut SegmentedVec<T, PREALLOC>,
1681    /// Current pointer within segment
1682    ptr: *mut T,
1683    /// One past the last element to yield in the current segment (capacity capped by `vec.len`)
1684    segment_end: *mut T,
1685    /// Current logical index
1686    index: usize,
1687    /// Current shelf index (for dynamic segments)
1688    shelf_index: u32,
1689}
1690
1691impl<'a, T, const PREALLOC: usize> Iterator for IterMut<'a, T, PREALLOC> {
1692    type Item = &'a mut T;
1693
1694    #[inline]
1695    fn next(&mut self) -> Option<Self::Item> {
1696        if self.ptr < self.segment_end {
1697            let result = self.ptr;
1698            self.ptr = unsafe { self.ptr.add(1) };
1699            self.index += 1;
1700            return Some(unsafe { &mut *result });
1701        }
1702        self.next_segment()
1703    }
1704
1705    #[inline]
1706    fn size_hint(&self) -> (usize, Option<usize>) {
1707        let remaining = self.vec.len.saturating_sub(self.index);
1708        (remaining, Some(remaining))
1709    }
1710}
1711
1712impl<'a, T, const PREALLOC: usize> IterMut<'a, T, PREALLOC> {
1713    #[inline]
1714    fn next_segment(&mut self) -> Option<&'a mut T> {
1715        if self.index >= self.vec.len {
1716            return None;
1717        }
1718
1719        // Move to next segment
1720        if self.index < PREALLOC {
1721            // In prealloc segment
1722            let ptr = unsafe {
1723                (*self.vec.prealloc_segment.as_mut_ptr())
1724                    .as_mut_ptr()
1725                    .add(self.index)
1726            };
1727            let remaining = PREALLOC - self.index;
1728            let segment_len = remaining.min(self.vec.len - self.index);
1729            self.ptr = ptr;
1730            self.segment_end = unsafe { ptr.add(segment_len) };
1731        } else {
1732            // In dynamic segments
1733            let shelf_idx = self.shelf_index as usize;
1734            let shelf_size = SegmentedVec::<T, PREALLOC>::shelf_size(self.shelf_index);
1735            // NonNull::as_ptr returns *mut T, so this pointer supports mutable access
1736            let ptr = self.vec.dynamic_segments[shelf_idx].as_ptr();
1737            let segment_len = shelf_size.min(self.vec.len - self.index);
1738            self.ptr = ptr;
1739            self.segment_end = unsafe { ptr.add(segment_len) };
1740            self.shelf_index += 1;
1741        }
1742
1743        let result = self.ptr;
1744        self.ptr = unsafe { self.ptr.add(1) };
1745        self.index += 1;
1746        Some(unsafe { &mut *result })
1747    }
1748}
1749
1750impl<'a, T, const PREALLOC: usize> ExactSizeIterator for IterMut<'a, T, PREALLOC> {}
1751
1752impl<T, const PREALLOC: usize> IntoIterator for SegmentedVec<T, PREALLOC> {
1753    type Item = T;
1754    type IntoIter = IntoIter<T, PREALLOC>;
1755
1756    fn into_iter(self) -> Self::IntoIter {
1757        IntoIter {
1758            vec: self,
1759            index: 0,
1760        }
1761    }
1762}
1763
1764impl<'a, T, const PREALLOC: usize> IntoIterator for &'a SegmentedVec<T, PREALLOC> {
1765    type Item = &'a T;
1766    type IntoIter = Iter<'a, T, PREALLOC>;
1767
1768    fn into_iter(self) -> Self::IntoIter {
1769        self.iter()
1770    }
1771}
1772
1773impl<'a, T, const PREALLOC: usize> IntoIterator for &'a mut SegmentedVec<T, PREALLOC> {
1774    type Item = &'a mut T;
1775    type IntoIter = IterMut<'a, T, PREALLOC>;
1776
1777    fn into_iter(self) -> Self::IntoIter {
1778        self.iter_mut()
1779    }
1780}
1781
1782/// An owning iterator over elements of a `SegmentedVec`.
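///
/// Created by the `IntoIterator` implementation for `SegmentedVec`.
///
/// # Example
///
/// ```
/// use segmented_vec::SegmentedVec;
///
/// let mut vec: SegmentedVec<String, 4> = SegmentedVec::new();
/// vec.push(String::from("a"));
/// vec.push(String::from("b"));
///
/// // Consuming iteration yields owned values.
/// let joined: String = vec.into_iter().collect();
/// assert_eq!(joined, "ab");
/// ```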
1783pub struct IntoIter<T, const PREALLOC: usize> {
1784    vec: SegmentedVec<T, PREALLOC>,
1785    index: usize,
1786}
1787
1788impl<T, const PREALLOC: usize> Iterator for IntoIter<T, PREALLOC> {
1789    type Item = T;
1790
1791    fn next(&mut self) -> Option<Self::Item> {
1792        if self.index >= self.vec.len {
1793            return None;
1794        }
1795        let value = unsafe { self.vec.unchecked_read(self.index) };
1796        self.index += 1;
1797        Some(value)
1798    }
1799
1800    fn size_hint(&self) -> (usize, Option<usize>) {
1801        let remaining = self.vec.len - self.index;
1802        (remaining, Some(remaining))
1803    }
1804}
1805
1806impl<T, const PREALLOC: usize> ExactSizeIterator for IntoIter<T, PREALLOC> {}
1807
1808impl<T, const PREALLOC: usize> Drop for IntoIter<T, PREALLOC> {
1809    fn drop(&mut self) {
1810        // Drop remaining elements that weren't consumed
1811        for i in self.index..self.vec.len {
1812            unsafe {
1813                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1814            }
1815        }
1816        // Setting len to 0 prevents the SegmentedVec's own Drop from dropping these elements again
1817        self.vec.len = 0;
1818    }
1819}
1820
1821/// A draining iterator for `SegmentedVec`.
1822///
1823/// This struct is created by the `drain` method on `SegmentedVec`.
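///
/// Unyielded elements in the drained range are dropped when the `Drain` itself
/// is dropped, and elements after the range are shifted down to close the gap.
///
/// # Example
///
/// A minimal sketch, assuming `drain` accepts a `Range<usize>` like `Vec::drain`:
///
/// ```
/// use segmented_vec::SegmentedVec;
///
/// let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
/// vec.extend(0..6);
///
/// let drained: Vec<i32> = vec.drain(1..4).collect();
/// assert_eq!(drained, vec![1, 2, 3]);
///
/// // The tail has been shifted down to fill the gap.
/// let rest: Vec<i32> = vec.iter().copied().collect();
/// assert_eq!(rest, vec![0, 4, 5]);
/// ```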
1824pub struct Drain<'a, T, const PREALLOC: usize> {
1825    vec: &'a mut SegmentedVec<T, PREALLOC>,
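    /// Start of the drained range; also the destination of the tail shift in `Drop`.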
1826    range_start: usize,
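    /// Exclusive end of the not-yet-consumed part of the range; the live tail starts here.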
1827    range_end: usize,
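    /// Index of the next element to yield from the front.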
1828    index: usize,
1829}
1830
1831impl<'a, T, const PREALLOC: usize> Iterator for Drain<'a, T, PREALLOC> {
1832    type Item = T;
1833
1834    fn next(&mut self) -> Option<Self::Item> {
1835        if self.index >= self.range_end {
1836            None
1837        } else {
1838            let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.index)) };
1839            self.index += 1;
1840            Some(value)
1841        }
1842    }
1843
1844    fn size_hint(&self) -> (usize, Option<usize>) {
1845        let remaining = self.range_end - self.index;
1846        (remaining, Some(remaining))
1847    }
1848}
1849
impl<'a, T, const PREALLOC: usize> DoubleEndedIterator for Drain<'a, T, PREALLOC> {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.index >= self.range_end {
            return None;
        }
        self.range_end -= 1;
        let value = unsafe { std::ptr::read(self.vec.unchecked_at(self.range_end)) };
        // Shift everything after the vacated slot down by one and shrink `len`, so
        // the live tail stays adjacent to the range and `Drop` still closes the gap.
        for i in self.range_end..self.vec.len - 1 {
            unsafe {
                let src = self.vec.unchecked_at(i + 1) as *const T;
                let dst = self.vec.unchecked_at_mut(i) as *mut T;
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }
        self.vec.len -= 1;
        Some(value)
    }
}
1860
1861impl<'a, T, const PREALLOC: usize> ExactSizeIterator for Drain<'a, T, PREALLOC> {}
1862
1863impl<'a, T, const PREALLOC: usize> Drop for Drain<'a, T, PREALLOC> {
1864    fn drop(&mut self) {
1865        // Drop any remaining elements in the range
1866        for i in self.index..self.range_end {
1867            unsafe {
1868                std::ptr::drop_in_place(self.vec.unchecked_at_mut(i));
1869            }
1870        }
1871
        // Shift the live tail down to fill the gap left by the drained range.
        // `range_end` marks where the tail starts: `next` never reads past it,
        // and `next_back` re-establishes this invariant after every call.
        let tail_start = self.range_end;
        let len = self.vec.len;
        let drain_count = tail_start - self.range_start;

        for i in 0..(len - tail_start) {
            unsafe {
                // Each copy moves one element between two distinct slots, so the
                // one-element source and destination regions never overlap.
                let src = self.vec.unchecked_at(tail_start + i) as *const T;
                let dst = self.vec.unchecked_at_mut(self.range_start + i) as *mut T;
                std::ptr::copy_nonoverlapping(src, dst, 1);
            }
        }

        self.vec.len = len - drain_count;
1886    }
1887}
1888
1889#[cfg(test)]
1890mod tests {
1891    use super::*;
1892
1893    #[test]
1894    fn test_new_empty() {
1895        let vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1896        assert!(vec.is_empty());
1897        assert_eq!(vec.len(), 0);
1898    }
1899
1900    #[test]
1901    fn test_push_pop() {
1902        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1903        vec.push(1);
1904        vec.push(2);
1905        vec.push(3);
1906        assert_eq!(vec.len(), 3);
1907        assert_eq!(vec.pop(), Some(3));
1908        assert_eq!(vec.pop(), Some(2));
1909        assert_eq!(vec.pop(), Some(1));
1910        assert_eq!(vec.pop(), None);
1911    }
1912
1913    #[test]
1914    fn test_get() {
1915        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1916        vec.push(10);
1917        vec.push(20);
1918        vec.push(30);
1919        assert_eq!(vec.get(0), Some(&10));
1920        assert_eq!(vec.get(1), Some(&20));
1921        assert_eq!(vec.get(2), Some(&30));
1922        assert_eq!(vec.get(3), None);
1923    }
1924
1925    #[test]
1926    fn test_index() {
1927        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1928        vec.push(10);
1929        vec.push(20);
1930        assert_eq!(vec[0], 10);
1931        assert_eq!(vec[1], 20);
1932        vec[0] = 100;
1933        assert_eq!(vec[0], 100);
1934    }
1935
1936    #[test]
1937    fn test_stable_pointers() {
1938        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1939        vec.push(1);
1940        let ptr = &vec[0] as *const i32;
1941
1942        // Push many more elements to trigger segment allocations
1943        for i in 2..1000 {
1944            vec.push(i);
1945        }
1946
1947        // The pointer should still be valid
1948        assert_eq!(unsafe { *ptr }, 1);
1949    }
1950
1951    #[test]
1952    fn test_iter() {
1953        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1954        for i in 0..100 {
1955            vec.push(i);
1956        }
1957
1958        let collected: Vec<i32> = vec.iter().copied().collect();
1959        let expected: Vec<i32> = (0..100).collect();
1960        assert_eq!(collected, expected);
1961    }
1962
1963    #[test]
1964    fn test_iter_mut() {
1965        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1966        for i in 0..10 {
1967            vec.push(i);
1968        }
1969
1970        for item in vec.iter_mut() {
1971            *item *= 2;
1972        }
1973
1974        let collected: Vec<i32> = vec.iter().copied().collect();
1975        let expected: Vec<i32> = (0..10).map(|x| x * 2).collect();
1976        assert_eq!(collected, expected);
1977    }
1978
1979    #[test]
1980    fn test_into_iter() {
1981        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
1982        for i in 0..10 {
1983            vec.push(i);
1984        }
1985
1986        let collected: Vec<i32> = vec.into_iter().collect();
1987        let expected: Vec<i32> = (0..10).collect();
1988        assert_eq!(collected, expected);
1989    }
1990
1991    #[test]
1992    fn test_from_iter() {
1993        let vec: SegmentedVec<i32, 4> = (0..10).collect();
1994        assert_eq!(vec.len(), 10);
1995        for i in 0..10 {
1996            assert_eq!(vec[i], i as i32);
1997        }
1998    }
1999
2000    #[test]
2001    fn test_extend() {
2002        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2003        vec.extend(0..5);
2004        vec.extend(5..10);
2005        assert_eq!(vec.len(), 10);
2006        for i in 0..10 {
2007            assert_eq!(vec[i], i as i32);
2008        }
2009    }
2010
2011    #[test]
2012    fn test_clear() {
2013        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2014        vec.extend(0..10);
2015        vec.clear();
2016        assert!(vec.is_empty());
2017        assert_eq!(vec.len(), 0);
2018    }
2019
2020    #[test]
2021    fn test_truncate() {
2022        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2023        vec.extend(0..10);
2024        vec.truncate(5);
2025        assert_eq!(vec.len(), 5);
2026        for i in 0..5 {
2027            assert_eq!(vec[i], i as i32);
2028        }
2029    }
2030
2031    #[test]
2032    fn test_zero_prealloc() {
2033        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
2034        for i in 0..100 {
2035            vec.push(i);
2036        }
2037
2038        for i in 0..100 {
2039            assert_eq!(vec[i], i as i32);
2040        }
2041
2042        assert_eq!(vec.pop(), Some(99));
2043        assert_eq!(vec.len(), 99);
2044    }
2045
2046    #[test]
2047    fn test_various_prealloc_sizes() {
2048        fn test_prealloc<const N: usize>() {
2049            let mut vec: SegmentedVec<i32, N> = SegmentedVec::new();
2050            for i in 0..100 {
2051                vec.push(i);
2052            }
2053            for i in 0..100 {
2054                assert_eq!(vec[i], i as i32);
2055            }
2056        }
2057
2058        test_prealloc::<0>();
2059        test_prealloc::<1>();
2060        test_prealloc::<2>();
2061        test_prealloc::<4>();
2062        test_prealloc::<8>();
2063        test_prealloc::<16>();
2064    }
2065
2066    #[test]
2067    fn test_clone() {
2068        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2069        vec.extend(0..10);
2070        let vec2 = vec.clone();
2071        assert_eq!(vec, vec2);
2072    }
2073
2074    #[test]
2075    fn test_debug() {
2076        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2077        vec.extend(0..3);
2078        let debug_str = format!("{:?}", vec);
2079        assert_eq!(debug_str, "[0, 1, 2]");
2080    }
2081
2082    #[test]
2083    fn test_first_last() {
2084        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2085        assert_eq!(vec.first(), None);
2086        assert_eq!(vec.last(), None);
2087
2088        vec.push(1);
2089        assert_eq!(vec.first(), Some(&1));
2090        assert_eq!(vec.last(), Some(&1));
2091
2092        vec.push(2);
2093        vec.push(3);
2094        assert_eq!(vec.first(), Some(&1));
2095        assert_eq!(vec.last(), Some(&3));
2096    }
2097
2098    #[test]
2099    fn test_drop_elements() {
2100        use std::cell::Cell;
2101        use std::rc::Rc;
2102
2103        let drop_count = Rc::new(Cell::new(0));
2104
2105        struct DropCounter {
2106            counter: Rc<Cell<i32>>,
2107        }
2108
2109        impl Drop for DropCounter {
2110            fn drop(&mut self) {
2111                self.counter.set(self.counter.get() + 1);
2112            }
2113        }
2114
2115        {
2116            let mut vec: SegmentedVec<DropCounter, 4> = SegmentedVec::new();
2117            for _ in 0..10 {
2118                vec.push(DropCounter {
2119                    counter: Rc::clone(&drop_count),
2120                });
2121            }
2122        }
2123
2124        assert_eq!(drop_count.get(), 10);
2125    }
2126
2127    #[test]
2128    fn test_shrink_to_fit() {
2129        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2130        vec.extend(0..100);
2131        vec.truncate(5);
2132        vec.shrink_to_fit();
2133        assert_eq!(vec.len(), 5);
2134        for i in 0..5 {
2135            assert_eq!(vec[i], i as i32);
2136        }
2137    }
2138
2139    #[test]
2140    fn test_sort() {
2141        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2142        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2143        vec.sort();
2144        let result: Vec<i32> = vec.iter().copied().collect();
2145        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2146    }
2147
2148    #[test]
2149    fn test_sort_empty() {
2150        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2151        vec.sort();
2152        assert!(vec.is_empty());
2153    }
2154
2155    #[test]
2156    fn test_sort_single() {
2157        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2158        vec.push(42);
2159        vec.sort();
2160        assert_eq!(vec[0], 42);
2161    }
2162
2163    #[test]
2164    fn test_sort_already_sorted() {
2165        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2166        vec.extend(0..10);
2167        vec.sort();
2168        let result: Vec<i32> = vec.iter().copied().collect();
2169        assert_eq!(result, (0..10).collect::<Vec<_>>());
2170    }
2171
2172    #[test]
2173    fn test_sort_reverse_sorted() {
2174        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2175        vec.extend((0..10).rev());
2176        vec.sort();
2177        let result: Vec<i32> = vec.iter().copied().collect();
2178        assert_eq!(result, (0..10).collect::<Vec<_>>());
2179    }
2180
2181    #[test]
2182    fn test_sort_by() {
2183        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2184        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2185        vec.sort_by(|a, b| b.cmp(a)); // reverse order
2186        let result: Vec<i32> = vec.iter().copied().collect();
2187        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
2188    }
2189
2190    #[test]
2191    fn test_sort_by_key() {
2192        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2193        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
2194        vec.sort_by_key(|k| k.abs());
2195        let result: Vec<i32> = vec.iter().copied().collect();
2196        assert_eq!(result, vec![0, 1, 2, 3, 4, -5, -6, -7, -8, -9]);
2197    }
2198
2199    #[test]
2200    fn test_sort_stable() {
2201        // Test that sort is stable (equal elements maintain their relative order)
2202        #[derive(Debug, Clone, Eq, PartialEq)]
2203        struct Item {
2204            key: i32,
2205            order: usize,
2206        }
2207
2208        let mut vec: SegmentedVec<Item, 4> = SegmentedVec::new();
2209        vec.push(Item { key: 1, order: 0 });
2210        vec.push(Item { key: 2, order: 1 });
2211        vec.push(Item { key: 1, order: 2 });
2212        vec.push(Item { key: 2, order: 3 });
2213        vec.push(Item { key: 1, order: 4 });
2214
2215        vec.sort_by_key(|item| item.key);
2216
2217        // All items with key=1 should come first, in original order
2218        assert_eq!(vec[0], Item { key: 1, order: 0 });
2219        assert_eq!(vec[1], Item { key: 1, order: 2 });
2220        assert_eq!(vec[2], Item { key: 1, order: 4 });
2221        // Then items with key=2, in original order
2222        assert_eq!(vec[3], Item { key: 2, order: 1 });
2223        assert_eq!(vec[4], Item { key: 2, order: 3 });
2224    }
2225
2226    #[test]
2227    fn test_sort_unstable() {
2228        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2229        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2230        vec.sort_unstable();
2231        let result: Vec<i32> = vec.iter().copied().collect();
2232        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2233    }
2234
2235    #[test]
2236    fn test_sort_unstable_by() {
2237        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2238        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2239        vec.sort_unstable_by(|a, b| b.cmp(a)); // reverse order
2240        let result: Vec<i32> = vec.iter().copied().collect();
2241        assert_eq!(result, vec![9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
2242    }
2243
2244    #[test]
2245    fn test_sort_unstable_by_key() {
2246        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2247        vec.extend([-5, 2, -8, 1, -9, 3, -7, 4, -6, 0]);
2248        vec.sort_unstable_by_key(|k| k.abs());
2249        // Just verify it's sorted by absolute value (unstable may reorder equal elements)
2250        let result: Vec<i32> = vec.iter().copied().collect();
2251        for i in 1..result.len() {
2252            assert!(result[i - 1].abs() <= result[i].abs());
2253        }
2254    }
2255
2256    #[test]
2257    fn test_sort_large() {
2258        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2259        // Add numbers in reverse order
2260        vec.extend((0..1000).rev());
2261        vec.sort();
2262        for i in 0..1000 {
2263            assert_eq!(vec[i], i as i32);
2264        }
2265    }
2266
2267    #[test]
2268    fn test_sort_unstable_large() {
2269        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2270        // Add numbers in reverse order
2271        vec.extend((0..1000).rev());
2272        vec.sort_unstable();
2273        for i in 0..1000 {
2274            assert_eq!(vec[i], i as i32);
2275        }
2276    }
2277
2278    #[test]
2279    fn test_sort_with_zero_prealloc() {
2280        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
2281        vec.extend([5, 2, 8, 1, 9, 3, 7, 4, 6, 0]);
2282        vec.sort();
2283        let result: Vec<i32> = vec.iter().copied().collect();
2284        assert_eq!(result, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
2285    }
2286
2287    #[test]
2288    fn test_sort_pointers_remain_stable_after_sort() {
2289        // Verify that after sorting, pointers to elements are still valid
2290        // (they point to different values, but the memory locations are stable)
2291        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2292        vec.extend([5, 2, 8, 1, 9]);
2293
2294        // Get pointer to first element before sort
2295        let ptr = &vec[0] as *const i32;
2296
2297        vec.sort();
2298
2299        // After sort, the pointer still points to valid memory (now contains sorted value)
2300        assert_eq!(unsafe { *ptr }, 1); // First element after sort is 1
2301    }
2302
2303    // --- Slice tests ---
2304
2305    #[test]
2306    fn test_as_slice() {
2307        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2308        vec.extend(0..10);
2309
2310        let slice = vec.as_slice();
2311        assert_eq!(slice.len(), 10);
2312        assert_eq!(slice[0], 0);
2313        assert_eq!(slice[9], 9);
2314    }
2315
2316    #[test]
2317    fn test_as_mut_slice() {
2318        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2319        vec.extend(0..10);
2320
2321        {
2322            let mut slice = vec.as_mut_slice();
2323            slice[0] = 100;
2324            slice[9] = 200;
2325        }
2326
2327        assert_eq!(vec[0], 100);
2328        assert_eq!(vec[9], 200);
2329    }
2330
2331    #[test]
2332    fn test_slice_range() {
2333        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2334        vec.extend(0..10);
2335
2336        let slice = vec.slice(2..5);
2337        assert_eq!(slice.len(), 3);
2338        assert_eq!(slice[0], 2);
2339        assert_eq!(slice[1], 3);
2340        assert_eq!(slice[2], 4);
2341    }
2342
2343    #[test]
2344    fn test_slice_mut_range() {
2345        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2346        vec.extend(0..10);
2347
2348        {
2349            let mut slice = vec.slice_mut(2..5);
2350            slice[0] = 100;
2351            slice[2] = 200;
2352        }
2353
2354        assert_eq!(vec[2], 100);
2355        assert_eq!(vec[4], 200);
2356    }
2357
2358    #[test]
2359    fn test_slice_first_last() {
2360        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2361        vec.extend(0..10);
2362
2363        let slice = vec.as_slice();
2364        assert_eq!(slice.first(), Some(&0));
2365        assert_eq!(slice.last(), Some(&9));
2366    }
2367
2368    #[test]
2369    fn test_slice_iter() {
2370        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2371        vec.extend(0..10);
2372
2373        let slice = vec.as_slice();
2374        let collected: Vec<i32> = slice.iter().copied().collect();
2375        assert_eq!(collected, (0..10).collect::<Vec<_>>());
2376    }
2377
2378    #[test]
2379    fn test_slice_iter_rev() {
2380        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2381        vec.extend(0..10);
2382
2383        let slice = vec.as_slice();
2384        let collected: Vec<i32> = slice.iter().rev().copied().collect();
2385        assert_eq!(collected, (0..10).rev().collect::<Vec<_>>());
2386    }
2387
2388    #[test]
2389    fn test_slice_contains() {
2390        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2391        vec.extend(0..10);
2392
2393        let slice = vec.as_slice();
2394        assert!(slice.contains(&5));
2395        assert!(!slice.contains(&100));
2396    }
2397
2398    #[test]
2399    fn test_slice_binary_search() {
2400        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2401        vec.extend(0..100);
2402
2403        let slice = vec.as_slice();
2404        assert_eq!(slice.binary_search(&50), Ok(50));
2405        assert_eq!(slice.binary_search(&0), Ok(0));
2406        assert_eq!(slice.binary_search(&99), Ok(99));
2407        assert_eq!(slice.binary_search(&100), Err(100));
2408    }
2409
2410    #[test]
2411    fn test_slice_split_at() {
2412        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2413        vec.extend(0..10);
2414
2415        let slice = vec.as_slice();
2416        let (left, right) = slice.split_at(5);
2417        assert_eq!(left.len(), 5);
2418        assert_eq!(right.len(), 5);
2419        assert_eq!(left[0], 0);
2420        assert_eq!(right[0], 5);
2421    }
2422
2423    #[test]
2424    fn test_slice_to_vec() {
2425        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2426        vec.extend(0..10);
2427
2428        let slice = vec.as_slice();
2429        let converted = slice.to_vec();
2430        assert_eq!(converted, (0..10).collect::<Vec<_>>());
2431    }
2432
2433    #[test]
2434    fn test_slice_mut_sort() {
2435        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2436        vec.extend([5, 3, 1, 4, 2, 8, 7, 6, 0, 9]);
2437
2438        // Sort only the middle part
2439        {
2440            let mut slice = vec.slice_mut(2..8);
2441            slice.sort();
2442        }
2443
2444        let result: Vec<i32> = vec.iter().copied().collect();
2445        assert_eq!(result, vec![5, 3, 1, 2, 4, 6, 7, 8, 0, 9]);
2446    }
2447
2448    #[test]
2449    fn test_slice_mut_reverse() {
2450        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2451        vec.extend(0..10);
2452
2453        {
2454            let mut slice = vec.slice_mut(2..8);
2455            slice.reverse();
2456        }
2457
2458        let result: Vec<i32> = vec.iter().copied().collect();
2459        assert_eq!(result, vec![0, 1, 7, 6, 5, 4, 3, 2, 8, 9]);
2460    }
2461
2462    #[test]
2463    fn test_slice_mut_fill() {
2464        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2465        vec.extend(0..10);
2466
2467        {
2468            let mut slice = vec.slice_mut(2..5);
2469            slice.fill(99);
2470        }
2471
2472        let result: Vec<i32> = vec.iter().copied().collect();
2473        assert_eq!(result, vec![0, 1, 99, 99, 99, 5, 6, 7, 8, 9]);
2474    }
2475
2476    #[test]
2477    fn test_slice_starts_with() {
2478        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2479        vec.extend(0..10);
2480
2481        let slice = vec.as_slice();
2482        assert!(slice.starts_with(&[0, 1, 2]));
2483        assert!(!slice.starts_with(&[1, 2, 3]));
2484    }
2485
2486    #[test]
2487    fn test_slice_ends_with() {
2488        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2489        vec.extend(0..10);
2490
2491        let slice = vec.as_slice();
2492        assert!(slice.ends_with(&[7, 8, 9]));
2493        assert!(!slice.ends_with(&[6, 7, 8]));
2494    }
2495
2496    #[test]
2497    fn test_slice_eq() {
2498        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2499        vec.extend(0..10);
2500
2501        let slice = vec.as_slice();
2502        assert_eq!(slice, (0..10).collect::<Vec<_>>());
2503    }
2504
2505    #[test]
2506    fn test_min_non_zero_cap() {
2507        // For u8 (1 byte), first segment should be 8 elements
2508        let mut vec_u8: SegmentedVec<u8, 0> = SegmentedVec::new();
2509        vec_u8.push(1);
2510        assert_eq!(vec_u8.capacity(), 8);
2511
2512        // For i32 (4 bytes, <= 1024), first segment should be 4 elements
2513        let mut vec_i32: SegmentedVec<i32, 0> = SegmentedVec::new();
2514        vec_i32.push(1);
2515        assert_eq!(vec_i32.capacity(), 4);
2516
2517        // Verify indexing still works correctly with the larger first segment
2518        for i in 0u8..100 {
2519            vec_u8.push(i);
2520        }
2521        for i in 0..101 {
2522            assert_eq!(vec_u8[i], if i == 0 { 1 } else { (i - 1) as u8 });
2523        }
2524    }
2525
2526    #[test]
2527    fn test_extend_from_copy_slice() {
2528        // Test with PREALLOC=0
2529        let mut vec: SegmentedVec<i32, 0> = SegmentedVec::new();
2530        let data: Vec<i32> = (0..100).collect();
2531        vec.extend_from_copy_slice(&data);
2532        assert_eq!(vec.len(), 100);
2533        for i in 0..100 {
2534            assert_eq!(vec[i], i as i32);
2535        }
2536
2537        // Test with PREALLOC=4, starting empty
2538        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2539        vec.extend_from_copy_slice(&data);
2540        assert_eq!(vec.len(), 100);
2541        for i in 0..100 {
2542            assert_eq!(vec[i], i as i32);
2543        }
2544
2545        // Test with PREALLOC=4, starting with some elements
2546        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2547        vec.push(999);
2548        vec.push(998);
2549        vec.extend_from_copy_slice(&data[..10]);
2550        assert_eq!(vec.len(), 12);
2551        assert_eq!(vec[0], 999);
2552        assert_eq!(vec[1], 998);
2553        for i in 0..10 {
2554            assert_eq!(vec[i + 2], i as i32);
2555        }
2556
2557        // Test empty slice
2558        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2559        vec.extend_from_copy_slice(&[]);
2560        assert!(vec.is_empty());
2561
2562        // Test extend to non-boundary, then push (regression test)
2563        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2564        vec.extend_from_copy_slice(&[1, 2, 3]); // 3 elements, not at boundary
2565        assert_eq!(vec.len(), 3);
2566        vec.push(4); // Should use fast path, not crash in push_slow
2567        vec.push(5);
2568        vec.push(6);
2569        assert_eq!(vec.len(), 6);
2570        for i in 0..6 {
2571            assert_eq!(vec[i], (i + 1) as i32);
2572        }
2573
2574        // Test extend to exact capacity boundary, then push
2575        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
2576        vec.extend_from_copy_slice(&[1, 2, 3, 4]); // Fills prealloc exactly
2577        assert_eq!(vec.len(), 4);
2578        vec.push(5); // Should correctly allocate new segment
2579        assert_eq!(vec.len(), 5);
2580        assert_eq!(vec[4], 5);
2581    }
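
    // Regression sketch for draining from the back. Assumes `drain` accepts a
    // `Range<usize>` like `Vec::drain`. After `next_back`, the unconsumed part
    // of the range and the live tail must stay adjacent so that dropping the
    // `Drain` closes the gap correctly.
    #[test]
    fn test_drain_next_back_then_drop() {
        let mut vec: SegmentedVec<i32, 4> = SegmentedVec::new();
        vec.extend(0..10);

        {
            let mut drain = vec.drain(2..5);
            assert_eq!(drain.next_back(), Some(4));
            assert_eq!(drain.next(), Some(2));
            // Element 3 is never yielded; it is dropped along with the `Drain`.
        }

        let result: Vec<i32> = vec.iter().copied().collect();
        assert_eq!(result, vec![0, 1, 5, 6, 7, 8, 9]);
        assert_eq!(vec.len(), 7);
    }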
2582}