// embed_collections/seg_list.rs

//! Segmented List - A segmented list with cache-friendly node sizes.
//!
//! Supports push() / pop() at the tail, and one-time consumption of all elements.
//! It does not support random access.
//!
//! Each segment's capacity is calculated at runtime based on T's size
//! to fit within a cache line. (The first segment is 2 * CACHE_LINE_SIZE; subsequent allocations
//! use CACHE_LINE_SIZE * 4.)
//!
//! It's faster than Vec when the number of items is small (1~64), because it does not re-allocate
//! during `push()`. It's slower than Vec when the number is very large (due to pointer dereference cost).
//!
//! # NOTE
//!
//! T is allowed to be larger than `2 * CACHE_LINE_SIZE`; in this case `SegList` will ensure at least 2
//! items in one segment. But it's not efficient when T is larger than 64B — consider putting T into a Box.
//!
//! # Example
//!
//! ```rust
//! use embed_collections::seg_list::SegList;
//!
//! // Create a new SegList
//! let mut list: SegList<i32> = SegList::new();
//!
//! // Push elements to the back
//! list.push(10);
//! list.push(20);
//! list.push(30);
//!
//! assert_eq!(list.len(), 3);
//! assert_eq!(list.first(), Some(&10));
//! assert_eq!(list.last(), Some(&30));
//!
//! // Pop elements from the back (LIFO order)
//! assert_eq!(list.pop(), Some(30));
//! assert_eq!(list.pop(), Some(20));
//! assert_eq!(list.pop(), Some(10));
//! assert_eq!(list.pop(), None);
//! assert!(list.is_empty());
//!
//! // Iterate over elements
//! list.push(1);
//! list.push(2);
//! list.push(3);
//! let sum: i32 = list.iter().sum();
//! assert_eq!(sum, 6);
//!
//! // Mutable iteration
//! for item in list.iter_mut() {
//!     *item *= 10;
//! }
//! assert_eq!(list.last(), Some(&30));
//!
//! // Drain all elements (consumes the list)
//! let drained: Vec<i32> = list.drain().collect();
//! assert_eq!(drained, vec![10, 20, 30]);
//! ```

60use crate::CACHE_LINE_SIZE;
61use alloc::alloc::{alloc, dealloc, handle_alloc_error};
62use core::alloc::Layout;
63use core::mem::{MaybeUninit, align_of, size_of};
64use core::ptr::{NonNull, null_mut};
65
/// Segmented list with cache-friendly segment sizes.
///
/// Supports push() / pop() at the tail, and one-time consumption of all elements.
/// It does not support random access.
///
/// Each segment's capacity is calculated at runtime based on T's size
/// to fit within a cache line. (The first segment is 2 * CACHE_LINE_SIZE; subsequent allocations
/// use CACHE_LINE_SIZE * 4.)
///
/// It's faster than Vec when the number of items is small (1~64), because it does not re-allocate
/// during `push()`. It's slower than Vec when the number is very large (due to pointer dereference cost).
///
/// # NOTE
///
/// T is allowed to be larger than `2 * CACHE_LINE_SIZE`; in this case SegList will ensure at least 2
/// items in one segment. But it's not efficient when T is larger than 128B — consider putting T into a Box.
///
/// Refer to [module level](crate::seg_list) doc for examples.
pub struct SegList<T> {
    /// Pointer to the last segment. The segment chain is circular:
    /// `tail.next` points back to the first segment, so only the tail
    /// pointer needs to be stored (keeps the main struct small).
    tail: NonNull<SegHeader<T>>,
    /// Total number of elements in the list
    count: usize,
}
90
91unsafe impl<T: Send> Send for SegList<T> {}
92unsafe impl<T: Send> Sync for SegList<T> {}
93
94impl<T> SegList<T> {
95    /// Create a new empty SegList with one allocated segment
96    pub fn new() -> Self {
97        let mut seg = unsafe { Segment::<T>::alloc(null_mut(), null_mut(), true) };
98        let header_ptr = seg.header.as_ptr();
99        let header = seg.get_header_mut();
100        // Make it circular: tail.next points to head (itself for now)
101        header.next = header_ptr;
102        Self { tail: seg.header, count: 0 }
103    }
104
105    /// Returns true if the list is empty
106    #[inline(always)]
107    pub fn is_empty(&self) -> bool {
108        self.count == 0
109    }
110
111    /// Get the base capacity of the first segment
112    #[inline(always)]
113    pub const fn segment_cap() -> usize {
114        Segment::<T>::base_cap()
115    }
116
117    /// Returns the total number of elements in the list
118    #[inline(always)]
119    pub fn len(&self) -> usize {
120        self.count
121    }
122
123    /// Push an element to the back of the list
124    #[inline]
125    pub fn push(&mut self, item: T) {
126        unsafe {
127            let mut tail_seg = Segment::from_raw(self.tail);
128            if tail_seg.is_full() {
129                let tail_ptr = tail_seg.header.as_ptr();
130                let cur = tail_seg.get_header_mut();
131                // Subsequent segments use LARGE_LAYOUT (cacheline * 2)
132                let new_seg = Segment::alloc(tail_ptr, cur.next, false);
133                cur.next = new_seg.header.as_ptr();
134                self.tail = new_seg.header;
135                tail_seg = new_seg;
136            }
137            tail_seg.push(item);
138        }
139        self.count += 1;
140    }
141
142    /// Pop an element from the back of the list
143    #[inline]
144    pub fn pop(&mut self) -> Option<T> {
145        if self.count == 0 {
146            return None;
147        }
148        unsafe {
149            let mut tail_seg = Segment::from_raw(self.tail);
150            let (item, is_empty) = tail_seg.pop();
151            if is_empty {
152                let cur = tail_seg.get_header_mut();
153                let first_ptr = cur.next;
154                let cur_prev = cur.prev;
155                if self.tail.as_ptr() != first_ptr && !cur_prev.is_null() {
156                    // More than one segment: remove tail segment
157                    self.tail = NonNull::new_unchecked(cur_prev);
158                    (*self.tail.as_ptr()).next = first_ptr;
159                    tail_seg.dealloc();
160                }
161                // If only one segment, keep it for future use
162            }
163            self.count -= 1;
164            Some(item)
165        }
166    }
167
168    // Break the cycle and free all segments
169    #[inline(always)]
170    fn break_first_node(&mut self) -> Segment<T> {
171        let tail_header = unsafe { self.tail.as_mut() };
172        let first = tail_header.next;
173        tail_header.next = null_mut();
174        unsafe { Segment::from_raw(NonNull::new_unchecked(first)) }
175    }
176
177    #[inline(always)]
178    fn first_ptr(&self) -> NonNull<SegHeader<T>> {
179        // SAFETY: tail always points to a valid segment with at least one element.
180        // get first segment through next ptr from the tail
181        unsafe {
182            let tail_header = self.tail.as_ref();
183            let first = tail_header.next;
184            NonNull::new_unchecked(first)
185        }
186    }
187
188    /// Returns an iterator over the list
189    #[inline]
190    pub fn iter(&self) -> SegListIter<'_, T> {
191        let first_seg = unsafe { Segment::from_raw(self.first_ptr()) };
192        SegListIter {
193            base: IterBase { cur: first_seg, cur_idx: 0, remaining: self.count, forward: true },
194            _marker: core::marker::PhantomData,
195        }
196    }
197
198    /// Returns a reverse iterator over the list
199    #[inline]
200    pub fn iter_rev(&self) -> SegListIter<'_, T> {
201        let tail_seg = unsafe { Segment::from_raw(self.tail) };
202        let tail_header = tail_seg.get_header();
203        let start_idx = if tail_header.count > 0 { tail_header.count as usize - 1 } else { 0 };
204        SegListIter {
205            base: IterBase {
206                cur: tail_seg,
207                cur_idx: start_idx,
208                remaining: self.count,
209                forward: false,
210            },
211            _marker: core::marker::PhantomData,
212        }
213    }
214
215    /// Returns a mutable iterator over the list
216    #[inline]
217    pub fn iter_mut(&mut self) -> SegListIterMut<'_, T> {
218        let first_seg = unsafe { Segment::from_raw(self.first_ptr()) };
219        SegListIterMut {
220            base: IterBase { cur: first_seg, cur_idx: 0, remaining: self.count, forward: true },
221            _marker: core::marker::PhantomData,
222        }
223    }
224
225    /// Returns a mutable reverse iterator over the list
226    #[inline]
227    pub fn iter_mut_rev(&mut self) -> SegListIterMut<'_, T> {
228        let tail_seg = unsafe { Segment::from_raw(self.tail) };
229        let tail_header = tail_seg.get_header();
230        let start_idx = if tail_header.count > 0 { tail_header.count as usize - 1 } else { 0 };
231        SegListIterMut {
232            base: IterBase {
233                cur: tail_seg,
234                cur_idx: start_idx,
235                remaining: self.count,
236                forward: false,
237            },
238            _marker: core::marker::PhantomData,
239        }
240    }
241
242    /// Returns a draining iterator that consumes the list and yields elements from head to tail
243    pub fn drain(mut self) -> SegListDrain<T> {
244        // Break the cycle and get the first segment
245        let mut first = self.break_first_node();
246        let cur = if self.count == 0 {
247            unsafe {
248                first.dealloc();
249            }
250            None
251        } else {
252            Some(first)
253        };
254        // To prevent drop from being called
255        core::mem::forget(self);
256        SegListDrain { cur, cur_idx: 0, forward: true }
257    }
258
259    /// Returns a draining iterator that consumes the list and yields elements from tail to head
260    pub fn into_rev(mut self) -> SegListDrain<T> {
261        // break the cycle
262        let mut first = self.break_first_node();
263        let (cur, start_idx) = if self.count == 0 {
264            unsafe {
265                first.dealloc();
266            }
267            (None, 0)
268        } else {
269            let tail_seg = unsafe { Segment::from_raw(self.tail) };
270            let tail_header = tail_seg.get_header();
271            let start_idx = if tail_header.count > 0 { tail_header.count as usize - 1 } else { 0 };
272            (Some(tail_seg), start_idx)
273        };
274        core::mem::forget(self);
275        SegListDrain { cur, cur_idx: start_idx, forward: false }
276    }
277
278    /// Returns a reference to the first element in the list
279    #[inline]
280    pub fn first(&self) -> Option<&T> {
281        if self.count == 0 {
282            return None;
283        }
284        // SAFETY: tail always points to a valid segment with at least one element.
285        // get first segment through next ptr from the tail
286        unsafe {
287            let first_seg = Segment::from_raw(self.first_ptr());
288            Some((*first_seg.item_ptr(0)).assume_init_ref())
289        }
290    }
291
292    /// Returns a mut reference to the first element in the list
293    #[inline]
294    pub fn first_mut(&self) -> Option<&T> {
295        if self.count == 0 {
296            return None;
297        }
298        // SAFETY: tail always points to a valid segment with at least one element.
299        // get first segment through next ptr from the tail
300        unsafe {
301            let first_seg = Segment::from_raw(self.first_ptr());
302            Some((*first_seg.item_ptr(0)).assume_init_mut())
303        }
304    }
305
306    /// Returns a reference to the last element in the list
307    #[inline]
308    pub fn last(&self) -> Option<&T> {
309        // SAFETY: tail always points to a valid segment with at least one element
310        unsafe {
311            let tail_seg = Segment::from_raw(self.tail);
312            let header = tail_seg.get_header();
313            if header.count == 0 {
314                return None;
315            }
316            let idx = (header.count - 1) as usize;
317            Some((*tail_seg.item_ptr(idx)).assume_init_ref())
318        }
319    }
320
321    /// Returns a mutable reference to the last element in the list
322    #[inline]
323    pub fn last_mut(&mut self) -> Option<&mut T> {
324        // SAFETY: tail always points to a valid segment with at least one element
325        unsafe {
326            let tail_seg = Segment::from_raw(self.tail);
327            let header = tail_seg.get_header();
328            if header.count == 0 {
329                return None;
330            }
331            let idx = (header.count - 1) as usize;
332            Some((*tail_seg.item_ptr(idx)).assume_init_mut())
333        }
334    }
335
336    /// Clear all elements from the list
337    #[inline]
338    pub fn clear(&mut self) {
339        while self.pop().is_some() {}
340    }
341}
342
343impl<T> Default for SegList<T> {
344    fn default() -> Self {
345        Self::new()
346    }
347}
348
impl<T> Drop for SegList<T> {
    /// Drops all remaining elements and frees every segment.
    fn drop(&mut self) {
        // Break the cycle and get the first segment. After break_first_node,
        // the tail's `next` is null, so the walk below terminates.
        let mut cur = self.break_first_node();
        loop {
            // Save next pointer before dealloc (dealloc frees the header it lives in)
            let next = cur.get_header().next;
            // SAFETY: each segment in the chain is a live allocation owned by
            // this list; dealloc_with_items drops its initialized elements first.
            unsafe { cur.dealloc_with_items() };
            if next.is_null() {
                break;
            }
            cur = unsafe { Segment::from_raw(NonNull::new_unchecked(next)) };
        }
    }
}
364
365impl<T> IntoIterator for SegList<T> {
366    type Item = T;
367    type IntoIter = SegListDrain<T>;
368
369    #[inline(always)]
370    fn into_iter(self) -> Self::IntoIter {
371        self.drain()
372    }
373}
374
375impl<T: core::fmt::Debug> core::fmt::Debug for SegList<T> {
376    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
377        f.debug_struct("SegList").field("len", &self.len()).finish()
378    }
379}
380
/// Segment header containing metadata.
///
/// The element storage lives in the same allocation, immediately after this
/// header (at `Segment::<T>::data_offset()` bytes from the allocation start).
#[repr(C)]
struct SegHeader<T> {
    /// Count of valid (initialized) elements in this segment
    count: u32,
    /// Capacity of this segment (may vary for different segments)
    cap: u32,
    /// Previous segment in the list (null for the first segment)
    prev: *mut SegHeader<T>,
    /// Next segment in the list
    next: *mut SegHeader<T>,
    /// Marks logical ownership of the `T`s stored after the header
    _marker: core::marker::PhantomData<T>,
}
393
/// A segment containing header and element storage.
///
/// This is a thin, non-owning wrapper around the header pointer; the
/// allocation is freed only by explicit `dealloc` / `dealloc_with_items` calls.
struct Segment<T> {
    /// Pointer to the header
    header: NonNull<SegHeader<T>>,
}
399
400impl<T> Segment<T> {
401    // data_offset is the same for both base and large layouts
402    const DATA_OFFSET: usize = Self::calc_data_offset();
403
404    // Pre-calculate layout for cacheline-sized segment (first segment)
405    // (cap, layout)
406    const BASE_LAYOUT: (usize, Layout) = Self::calc_layout_const(CACHE_LINE_SIZE * 2);
407
408    // Pre-calculate layout for cacheline*2-sized segment (subsequent segments)
409    // (cap, layout)
410    const LARGE_LAYOUT: (usize, Layout) = Self::calc_layout_const(CACHE_LINE_SIZE * 4);
411
412    /// Const fn to calculate data offset (same for all segments)
413    const fn calc_data_offset() -> usize {
414        let mut data_offset = size_of::<SegHeader<T>>();
415        let t_size = size_of::<T>();
416        let t_align = align_of::<MaybeUninit<T>>();
417
418        if t_size != 0 {
419            data_offset = (data_offset + t_align - 1) & !(t_align - 1);
420        }
421        data_offset
422    }
423
424    /// Const fn to calculate layout for a given cache line size
425    const fn calc_layout_const(cache_line: usize) -> (usize, Layout) {
426        let t_size = size_of::<T>();
427        let data_offset = Self::DATA_OFFSET;
428        let capacity;
429        let final_alloc_size;
430        let final_align;
431
432        if t_size == 0 {
433            // 0-size does not actually take place
434            capacity = 1024;
435            final_alloc_size = cache_line;
436            final_align = cache_line;
437        } else {
438            let min_elements = 2;
439            let min_required_size = data_offset + (t_size * min_elements);
440            let alloc_size = (min_required_size + cache_line - 1) & !(cache_line - 1);
441            final_align = if cache_line > align_of::<MaybeUninit<T>>() {
442                cache_line
443            } else {
444                align_of::<MaybeUninit<T>>()
445            };
446            final_alloc_size = (alloc_size + final_align - 1) & !(final_align - 1);
447            capacity = (final_alloc_size - data_offset) / t_size;
448            // rust 1.57 support assert in const fn
449            assert!(capacity >= min_elements);
450        }
451
452        match Layout::from_size_align(final_alloc_size, final_align) {
453            Ok(l) => (capacity, l),
454            Err(_) => panic!("Invalid layout"),
455        }
456    }
457
458    /// Get the base capacity (first segment's capacity)
459    #[inline(always)]
460    const fn base_cap() -> usize {
461        Self::BASE_LAYOUT.0
462    }
463
464    /// Get the large capacity (subsequent segments' capacity)
465    #[inline(always)]
466    const fn large_cap() -> usize {
467        Self::LARGE_LAYOUT.0
468    }
469
470    /// Get the data offset (offset of first element from header start)
471    #[inline(always)]
472    const fn data_offset() -> usize {
473        Self::DATA_OFFSET
474    }
475
476    /// Create a new empty segment
477    /// is_first: true for first segment (uses BASE_LAYOUT), false for subsequent (uses LARGE_LAYOUT)
478    #[inline]
479    unsafe fn alloc(prev: *mut SegHeader<T>, next: *mut SegHeader<T>, is_first: bool) -> Self {
480        let (cap, layout) = if is_first {
481            (Self::base_cap() as u32, Self::BASE_LAYOUT.1)
482        } else {
483            (Self::large_cap() as u32, Self::LARGE_LAYOUT.1)
484        };
485        let ptr: *mut u8 = unsafe { alloc(layout) };
486        if ptr.is_null() {
487            handle_alloc_error(layout);
488        }
489        unsafe {
490            let p = NonNull::new_unchecked(ptr as *mut SegHeader<T>);
491            let header = p.as_ptr();
492            // Initialize header
493            (*header).count = 0;
494            (*header).cap = cap;
495            (*header).prev = prev;
496            (*header).next = next;
497            Self::from_raw(p)
498        }
499    }
500
501    #[inline(always)]
502    unsafe fn dealloc_with_items(&mut self) {
503        unsafe {
504            if core::mem::needs_drop::<T>() {
505                for i in 0..self.len() {
506                    (*self.item_ptr(i)).assume_init_drop();
507                }
508            }
509            self.dealloc();
510        }
511    }
512
513    #[inline(always)]
514    unsafe fn dealloc(&mut self) {
515        // Deallocate the entire segment (header + items)
516        unsafe {
517            let cap = (*self.header.as_ptr()).cap as usize;
518            let layout =
519                if cap == Self::base_cap() { Self::BASE_LAYOUT.1 } else { Self::LARGE_LAYOUT.1 };
520            dealloc(self.header.as_ptr() as *mut u8, layout);
521        }
522    }
523
524    #[inline(always)]
525    unsafe fn from_raw(header: NonNull<SegHeader<T>>) -> Self {
526        Self { header }
527    }
528
529    /// Get the count of valid elements in this segment
530    #[inline(always)]
531    fn len(&self) -> usize {
532        unsafe { (*self.header.as_ptr()).count as usize }
533    }
534
535    #[inline(always)]
536    fn get_header(&self) -> &SegHeader<T> {
537        unsafe { self.header.as_ref() }
538    }
539
540    #[inline(always)]
541    fn get_header_mut(&mut self) -> &mut SegHeader<T> {
542        unsafe { self.header.as_mut() }
543    }
544
545    /// Check if segment has no valid elements
546    #[inline(always)]
547    fn is_empty(&self) -> bool {
548        self.len() == 0
549    }
550
551    /// Check if segment is full
552    #[inline(always)]
553    fn is_full(&self) -> bool {
554        let header = self.get_header();
555        header.count >= header.cap
556    }
557
558    /// Get pointer to item at index
559    #[inline]
560    fn item_ptr(&self, index: usize) -> *mut MaybeUninit<T> {
561        unsafe {
562            let items =
563                (self.header.as_ptr() as *mut u8).add(Self::data_offset()) as *mut MaybeUninit<T>;
564            items.add(index)
565        }
566    }
567
568    /// Push an element to this segment (if not full)
569    #[inline]
570    fn push(&mut self, item: T) {
571        debug_assert!(!self.is_full());
572        let idx = self.get_header().count as usize;
573        unsafe {
574            (*self.item_ptr(idx)).write(item);
575        }
576        self.get_header_mut().count = (idx + 1) as u32;
577    }
578
579    /// return (item, is_empty_now)
580    #[inline]
581    fn pop(&mut self) -> (T, bool) {
582        debug_assert!(!self.is_empty());
583        let idx = self.get_header().count - 1;
584        let item = unsafe { (*self.item_ptr(idx as usize)).assume_init_read() };
585        self.get_header_mut().count = idx;
586        (item, idx == 0)
587    }
588}
589
/// Base iterator implementation shared by immutable and mutable iterators.
/// Manages iteration state and returns raw pointers to elements.
/// `forward` is true for forward iteration, false for backward iteration.
struct IterBase<T> {
    /// Segment currently being walked
    cur: Segment<T>,
    /// Index (within `cur`) of the next element to yield
    cur_idx: usize,
    /// Elements left to yield; the sole termination condition
    remaining: usize,
    /// Direction: true = head-to-tail, false = tail-to-head
    forward: bool,
}
599
impl<T> IterBase<T> {
    /// Advances the iterator and returns a pointer to the next element's MaybeUninit.
    ///
    /// Termination is driven entirely by `remaining`, so the segment-hop code
    /// never has to check for the end of the chain in the traveling direction.
    #[inline]
    fn next(&mut self) -> Option<*mut MaybeUninit<T>> {
        if self.remaining == 0 {
            return None;
        }
        self.remaining -= 1;

        if self.forward {
            let cur_header = self.cur.get_header();
            let idx = if self.cur_idx >= cur_header.count as usize {
                // Current segment exhausted: hop to the next segment and
                // yield its element 0; cur_idx = 1 is the position after it.
                // (`remaining > 0` guarantees `next` points at a live segment.)
                let next = cur_header.next;
                self.cur = unsafe { Segment::from_raw(NonNull::new_unchecked(next)) };
                self.cur_idx = 1;
                0
            } else {
                let _idx = self.cur_idx;
                self.cur_idx = _idx + 1;
                _idx
            };
            Some(self.cur.item_ptr(idx))
        } else {
            // Backward: yield the element at cur_idx, then step to the
            // previous slot — possibly hopping to the previous segment's last
            // element. If `prev` is null we are at the head; `remaining`
            // reaching 0 stops iteration before cur_idx is read again.
            let idx = self.cur_idx;
            let item_ptr = self.cur.item_ptr(idx);
            if self.cur_idx == 0 {
                let cur_header = self.cur.get_header();
                if !cur_header.prev.is_null() {
                    self.cur =
                        unsafe { Segment::from_raw(NonNull::new_unchecked(cur_header.prev)) };
                    let prev_header = self.cur.get_header();
                    self.cur_idx = prev_header.count as usize - 1;
                }
            } else {
                self.cur_idx -= 1;
            }
            Some(item_ptr)
        }
    }

    /// Exact bounds: `remaining` is maintained precisely.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}
645
/// Immutable iterator over SegList.
///
/// Yields `&T`; the phantom lifetime borrows the list for the iterator's duration.
pub struct SegListIter<'a, T> {
    /// Shared traversal state (direction-aware)
    base: IterBase<T>,
    _marker: core::marker::PhantomData<&'a T>,
}
651
652impl<'a, T> Iterator for SegListIter<'a, T> {
653    type Item = &'a T;
654
655    #[inline]
656    fn next(&mut self) -> Option<Self::Item> {
657        self.base.next().map(|ptr| unsafe { (*ptr).assume_init_ref() })
658    }
659
660    #[inline]
661    fn size_hint(&self) -> (usize, Option<usize>) {
662        self.base.size_hint()
663    }
664}
665
666impl<'a, T> ExactSizeIterator for SegListIter<'a, T> {}
667
/// Mutable iterator over SegList.
///
/// Yields `&mut T`; the phantom lifetime holds an exclusive borrow of the list.
pub struct SegListIterMut<'a, T> {
    /// Shared traversal state (direction-aware)
    base: IterBase<T>,
    _marker: core::marker::PhantomData<&'a mut T>,
}
673
674impl<'a, T> Iterator for SegListIterMut<'a, T> {
675    type Item = &'a mut T;
676
677    #[inline]
678    fn next(&mut self) -> Option<Self::Item> {
679        self.base.next().map(|ptr| unsafe { (*ptr).assume_init_mut() })
680    }
681
682    #[inline]
683    fn size_hint(&self) -> (usize, Option<usize>) {
684        self.base.size_hint()
685    }
686}
687
688impl<'a, T> ExactSizeIterator for SegListIterMut<'a, T> {}
689
/// Draining iterator over SegList.
/// Consumes elements from head to tail (FIFO order) or tail to head (LIFO order).
///
/// Segments are freed as the iterator crosses them; dropping the iterator
/// early drops the remaining elements and frees the remaining segments.
pub struct SegListDrain<T> {
    /// Current segment, or None once the list is exhausted
    cur: Option<Segment<T>>,
    /// Index (within `cur`) of the next element to read
    cur_idx: usize,
    /// true = head-to-tail (`drain`), false = tail-to-head (`into_rev`)
    forward: bool,
}
697
impl<T> Iterator for SegListDrain<T> {
    type Item = T;

    /// Moves the next element out of the segment chain, freeing each segment
    /// once the iterator has consumed all of its elements.
    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        let cur_seg = self.cur.as_mut()?;
        // SAFETY: `cur` being Some is the invariant that the element at
        // `cur_idx` exists and is initialized; the cycle was broken at
        // construction, so chain walks terminate at null.
        unsafe {
            let item = (*cur_seg.item_ptr(self.cur_idx)).assume_init_read();
            let header = cur_seg.get_header();
            if self.forward {
                let next_idx = self.cur_idx + 1;
                if next_idx >= header.count as usize {
                    // Segment fully consumed: save `next`, free it, advance.
                    let next = header.next;
                    cur_seg.dealloc();
                    if next.is_null() {
                        self.cur = None;
                    } else {
                        self.cur = Some(Segment::from_raw(NonNull::new_unchecked(next)));
                        self.cur_idx = 0;
                    }
                } else {
                    self.cur_idx = next_idx;
                }
            } else if self.cur_idx == 0 {
                // Backward: index 0 was the last unread element of this segment.
                let prev = header.prev;
                cur_seg.dealloc();
                if prev.is_null() {
                    self.cur = None;
                } else {
                    let _cur = Segment::from_raw(NonNull::new_unchecked(prev));
                    self.cur_idx = _cur.get_header().count as usize - 1;
                    self.cur = Some(_cur);
                }
            } else {
                self.cur_idx -= 1;
            }
            Some(item)
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = if let Some(_cur) = &self.cur {
            // `cur` is Some only while at least one element is still unread
            // (it is set to None the moment the last element is taken), so a
            // lower bound of 1 is valid; the exact total is not tracked.
            1 // At least one element if cur is Some
        } else {
            0
        };
        (remaining, None)
    }
}
750
impl<T> Drop for SegListDrain<T> {
    /// Drops all not-yet-yielded elements and frees every remaining segment.
    ///
    /// Elements already yielded by `next()` were moved out, so only the
    /// unread range of the current segment plus all untouched segments in the
    /// traveling direction need dropping.
    fn drop(&mut self) {
        if let Some(mut cur) = self.cur.take() {
            unsafe {
                if self.forward {
                    // Save next pointer before dealloc
                    let header = cur.get_header();
                    let mut next = header.next;
                    // Drop remaining elements in this segment (from current index to end)
                    if core::mem::needs_drop::<T>() {
                        for i in self.cur_idx..header.count as usize {
                            (*cur.item_ptr(i)).assume_init_drop();
                        }
                    }
                    cur.dealloc();
                    // Untouched segments: drop all their elements, then free.
                    while !next.is_null() {
                        cur = Segment::from_raw(NonNull::new_unchecked(next));
                        next = cur.get_header().next;
                        cur.dealloc_with_items();
                    }
                } else {
                    // Save prev pointer before dealloc
                    let mut prev = cur.get_header().prev;
                    // Drop remaining elements in this segment (from start to current index)
                    if core::mem::needs_drop::<T>() {
                        for i in 0..=self.cur_idx {
                            (*cur.item_ptr(i)).assume_init_drop();
                        }
                    }
                    cur.dealloc();
                    // Untouched segments toward the head: drop and free each.
                    while !prev.is_null() {
                        cur = Segment::from_raw(NonNull::new_unchecked(prev));
                        prev = cur.get_header().prev;
                        cur.dealloc_with_items();
                    }
                }
            }
        }
    }
}
791
792#[cfg(test)]
793mod tests {
794    use super::*;
795    use core::sync::atomic::{AtomicUsize, Ordering};
796
    /// Global counter incremented once per dropped [`DropTracker`].
    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    /// Test payload whose drops are counted via `DROP_COUNT`.
    #[derive(Debug)]
    struct DropTracker(i32);

    impl Drop for DropTracker {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    /// Reset the global drop counter before a test scenario.
    fn reset_drop_count() {
        DROP_COUNT.store(0, Ordering::SeqCst);
    }

    /// Read the current value of the global drop counter.
    fn get_drop_count() -> usize {
        DROP_COUNT.load(Ordering::SeqCst)
    }
815
816    #[test]
817    fn test_multiple_segments() {
818        let mut list: SegList<i32> = SegList::new();
819        if CACHE_LINE_SIZE == 128 {
820            assert_eq!(Segment::<i32>::base_cap(), 26);
821        }
822
823        for i in 0..100 {
824            list.push(i);
825        }
826
827        assert_eq!(list.len(), 100);
828
829        for i in (0..100).rev() {
830            assert_eq!(list.pop(), Some(i));
831        }
832
833        assert_eq!(list.pop(), None);
834    }
835
836    #[test]
837    fn test_iter_single_segment() {
838        // Test with small number of elements (single segment)
839        let mut list: SegList<i32> = SegList::new();
840
841        for i in 0..10 {
842            list.push(i);
843        }
844        assert_eq!(list.first(), Some(&0));
845        assert_eq!(list.last(), Some(&9));
846
847        // Test immutable iterator
848        let collected: Vec<i32> = list.iter().copied().collect();
849        assert_eq!(collected, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
850
851        // Test mutable iterator - modify elements
852        for item in list.iter_mut() {
853            *item *= 2;
854        }
855        assert_eq!(list.first(), Some(&0));
856        assert_eq!(list.last(), Some(&18));
857
858        // Verify modification
859        let collected: Vec<i32> = list.iter().copied().collect();
860        assert_eq!(collected, vec![0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);
861
862        // Test pop() - should return elements in LIFO order
863        for i in (0..10).rev() {
864            assert_eq!(list.pop(), Some(i * 2));
865        }
866        assert_eq!(list.pop(), None);
867        assert!(list.is_empty());
868    }
869
    #[test]
    fn test_iter_multi_segment() {
        // Test with many elements (multiple segments): 200 pushes exceed the
        // first segment's capacity, so iteration must hop across segments.
        let mut list: SegList<i32> = SegList::new();

        for i in 0..200 {
            list.push(i * 10);
        }
        assert_eq!(list.first(), Some(&0));
        assert_eq!(list.last(), Some(&1990));

        // Test immutable iterator across multiple segments
        let collected: Vec<i32> = list.iter().copied().collect();
        let expected: Vec<i32> = (0..200).map(|i| i * 10).collect();
        assert_eq!(collected, expected);

        // Test mutable iterator - modify elements across segment boundaries
        for item in list.iter_mut() {
            *item += 1;
        }
        assert_eq!(list.first(), Some(&1));
        assert_eq!(list.last(), Some(&1991));

        // Verify modification
        let collected: Vec<i32> = list.iter().copied().collect();
        let expected: Vec<i32> = (0..200).map(|i| i * 10 + 1).collect();
        assert_eq!(collected, expected);

        // Test pop() - should return elements in LIFO order across multiple segments
        for i in (0..200).rev() {
            assert_eq!(list.pop(), Some(i * 10 + 1));
        }
        assert_eq!(list.pop(), None);
        assert!(list.is_empty());
    }
905
906    #[test]
907    fn test_drain() {
908        // Get capacity per segment for DropTracker (i32)
909        let cap = Segment::<DropTracker>::base_cap();
910
911        // Scenario 1: Single segment, drain completely
912        {
913            reset_drop_count();
914            {
915                let mut list: SegList<DropTracker> = SegList::new();
916                // Push fewer elements than one segment capacity
917                for i in 0..5 {
918                    list.push(DropTracker(i));
919                }
920                assert!(list.len() < cap);
921
922                // Drain all elements
923                let drained: Vec<i32> = list.drain().map(|d| d.0).collect();
924                assert_eq!(drained, vec![0, 1, 2, 3, 4]);
925            }
926            // All 5 elements should be dropped by drain
927            assert_eq!(get_drop_count(), 5);
928        }
929
930        // Scenario 2: Three segments, drain first segment partially, then drop remaining
931        {
932            reset_drop_count();
933            {
934                let mut list: SegList<DropTracker> = SegList::new();
935                // Push elements to create 3 segments (cap * 2 + 5 = more than 2 segments)
936                let total = cap * 2 + 5;
937                for i in 0..total {
938                    list.push(DropTracker(i as i32));
939                }
940
941                // Drain only half of first segment
942                let drain_count = cap / 2;
943                let mut drain_iter = list.drain();
944                for i in 0..drain_count {
945                    assert_eq!(drain_iter.next().unwrap().0, i as i32);
946                }
947                // Drop the drain iterator early (remaining elements should be dropped)
948                drop(drain_iter);
949            }
950            // All elements should be dropped (drained + remaining in list)
951            assert_eq!(get_drop_count(), cap * 2 + 5);
952        }
953
954        // Scenario 3: Three segments, drain exactly first segment, then drop remaining
955        {
956            reset_drop_count();
957            {
958                let mut list: SegList<DropTracker> = SegList::new();
959                // Push elements to create 3 segments
960                let total = cap * 2 + 3;
961                for i in 0..total {
962                    list.push(DropTracker(i as i32));
963                }
964
965                // Drain exactly first segment
966                let mut drain_iter = list.drain();
967                for i in 0..cap {
968                    assert_eq!(drain_iter.next().unwrap().0, i as i32);
969                }
970                // Drop the drain iterator early
971                drop(drain_iter);
972            }
973            // All elements should be dropped
974            assert_eq!(get_drop_count(), cap * 2 + 3);
975        }
976    }
977
978    #[test]
979    fn test_drop_single_segment() {
980        reset_drop_count();
981        {
982            let mut list: SegList<DropTracker> = SegList::new();
983            let cap = Segment::<DropTracker>::base_cap();
984
985            // Push fewer elements than one segment capacity
986            for i in 0..5 {
987                list.push(DropTracker(i));
988            }
989            assert!(list.len() < cap);
990
991            // list drops here, should drop all elements
992        }
993
994        assert_eq!(get_drop_count(), 5);
995    }
996
997    #[test]
998    fn test_drop_multi_segment() {
999        let cap = Segment::<DropTracker>::base_cap();
1000        reset_drop_count();
1001        {
1002            let mut list: SegList<DropTracker> = SegList::new();
1003
1004            // Push elements to create multiple segments (3 segments)
1005            let total = cap * 2 + 10;
1006            for i in 0..total {
1007                list.push(DropTracker(i as i32));
1008            }
1009            // list drops here, should drop all elements across all segments
1010        }
1011        assert_eq!(get_drop_count(), cap * 2 + 10);
1012    }
1013
1014    #[test]
1015    fn test_clear() {
1016        let mut list: SegList<i32> = SegList::new();
1017
1018        for i in 0..50 {
1019            list.push(i);
1020        }
1021
1022        list.clear();
1023
1024        assert!(list.is_empty());
1025        assert_eq!(list.len(), 0);
1026        assert_eq!(list.pop(), None);
1027    }
1028
    /// An element type deliberately larger than a cache line, used to
    /// exercise the "large T" segment-sizing path (at least 2 items per
    /// segment regardless of element size).
    #[derive(Debug, Clone, Copy, PartialEq)]
    struct LargeStruct {
        data: [u64; 16], // 16 * 8 = 128 bytes
    }
1034
1035    impl LargeStruct {
1036        fn new(val: u64) -> Self {
1037            Self { data: [val; 16] }
1038        }
1039    }
1040
1041    #[test]
1042    fn test_size() {
1043        assert_eq!(size_of::<SegHeader::<LargeStruct>>(), 24);
1044        let data_offset = Segment::<LargeStruct>::data_offset();
1045        let base_cap = Segment::<LargeStruct>::base_cap();
1046        let large_cap = Segment::<LargeStruct>::large_cap();
1047        let base_layout = Segment::<LargeStruct>::BASE_LAYOUT.1;
1048        let large_layout = Segment::<LargeStruct>::LARGE_LAYOUT.1;
1049        println!(
1050            "LargeStruct: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
1051            data_offset,
1052            base_cap,
1053            base_layout.size(),
1054            base_layout.align(),
1055            large_cap,
1056            large_layout.size(),
1057            large_layout.align()
1058        );
1059        let data_offset = Segment::<u64>::data_offset();
1060        let base_cap = Segment::<u64>::base_cap();
1061        let large_cap = Segment::<u64>::large_cap();
1062        let base_layout = Segment::<u64>::BASE_LAYOUT.1;
1063        let large_layout = Segment::<u64>::LARGE_LAYOUT.1;
1064        println!(
1065            "u64: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
1066            data_offset,
1067            base_cap,
1068            base_layout.size(),
1069            base_layout.align(),
1070            large_cap,
1071            large_layout.size(),
1072            large_layout.align()
1073        );
1074        let data_offset = Segment::<u32>::data_offset();
1075        let base_cap = Segment::<u32>::base_cap();
1076        let large_cap = Segment::<u32>::large_cap();
1077        let base_layout = Segment::<u32>::BASE_LAYOUT.1;
1078        let large_layout = Segment::<u32>::LARGE_LAYOUT.1;
1079        println!(
1080            "u32: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
1081            data_offset,
1082            base_cap,
1083            base_layout.size(),
1084            base_layout.align(),
1085            large_cap,
1086            large_layout.size(),
1087            large_layout.align()
1088        );
1089        let data_offset = Segment::<u16>::data_offset();
1090        let base_cap = Segment::<u16>::base_cap();
1091        let large_cap = Segment::<u16>::large_cap();
1092        let base_layout = Segment::<u16>::BASE_LAYOUT.1;
1093        let large_layout = Segment::<u16>::LARGE_LAYOUT.1;
1094        println!(
1095            "u16: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
1096            data_offset,
1097            base_cap,
1098            base_layout.size(),
1099            base_layout.align(),
1100            large_cap,
1101            large_layout.size(),
1102            large_layout.align()
1103        );
1104    }
1105
1106    #[test]
1107    fn test_large_type_push_pop() {
1108        let mut list: SegList<LargeStruct> = SegList::new();
1109        // Push elements - each segment can only hold a few due to large element size
1110        for i in 0..50 {
1111            list.push(LargeStruct::new(i));
1112        }
1113        assert_eq!(list.len(), 50);
1114
1115        // Pop all elements - should work correctly across multiple segments
1116        for i in (0..50).rev() {
1117            let val = list.pop().unwrap();
1118            assert_eq!(val.data[0], i);
1119            assert_eq!(val.data[7], i);
1120        }
1121        assert!(list.is_empty());
1122        assert_eq!(list.pop(), None);
1123    }
1124
1125    #[test]
1126    fn test_large_type_iter() {
1127        let mut list: SegList<LargeStruct> = SegList::new();
1128
1129        // Push elements
1130        for i in 0..30 {
1131            list.push(LargeStruct::new(i * 10));
1132        }
1133
1134        // Test iterator
1135        let collected: Vec<u64> = list.iter().map(|s| s.data[0]).collect();
1136        let expected: Vec<u64> = (0..30).map(|i| i * 10).collect();
1137        assert_eq!(collected, expected);
1138    }
1139
1140    #[test]
1141    fn test_large_type_iter_mut() {
1142        let mut list: SegList<LargeStruct> = SegList::new();
1143
1144        // Push elements
1145        for i in 0..20 {
1146            list.push(LargeStruct::new(i));
1147        }
1148
1149        // Modify through iterator
1150        for item in list.iter_mut() {
1151            item.data[0] *= 2;
1152        }
1153
1154        // Verify modification
1155        let collected: Vec<u64> = list.iter().map(|s| s.data[0]).collect();
1156        let expected: Vec<u64> = (0..20).map(|i| i * 2).collect();
1157        assert_eq!(collected, expected);
1158    }
1159
1160    #[test]
1161    fn test_large_type_drain() {
1162        let mut list: SegList<LargeStruct> = SegList::new();
1163
1164        // Push elements
1165        for i in 0..40 {
1166            list.push(LargeStruct::new(i));
1167        }
1168
1169        // Drain and verify FIFO order
1170        let drained: Vec<u64> = list.drain().map(|s| s.data[0]).collect();
1171        let expected: Vec<u64> = (0..40).collect();
1172        assert_eq!(drained, expected);
1173    }
1174
1175    #[test]
1176    fn test_large_type_clear() {
1177        let mut list: SegList<LargeStruct> = SegList::new();
1178
1179        // Push elements
1180        for i in 0..60 {
1181            list.push(LargeStruct::new(i));
1182        }
1183        assert_eq!(list.len(), 60);
1184
1185        // Clear
1186        list.clear();
1187        assert!(list.is_empty());
1188        assert_eq!(list.len(), 0);
1189        assert_eq!(list.pop(), None);
1190    }
1191
1192    #[test]
1193    fn test_iter_rev_single_segment() {
1194        // Test with small number of elements (single segment)
1195        let mut list: SegList<i32> = SegList::new();
1196
1197        for i in 0..10 {
1198            list.push(i);
1199        }
1200
1201        // Test reverse iterator
1202        let collected: Vec<i32> = list.iter_rev().copied().collect();
1203        let expected: Vec<i32> = (0..10).rev().collect();
1204        assert_eq!(collected, expected);
1205
1206        // Test mutable reverse iterator
1207        for item in list.iter_mut_rev() {
1208            *item *= 10;
1209        }
1210
1211        // Verify modification
1212        assert_eq!(list.first(), Some(&0));
1213        assert_eq!(list.last(), Some(&90));
1214
1215        let collected: Vec<i32> = list.iter().copied().collect();
1216        let expected: Vec<i32> = vec![0, 10, 20, 30, 40, 50, 60, 70, 80, 90];
1217        assert_eq!(collected, expected);
1218    }
1219
1220    #[test]
1221    fn test_iter_rev_multi_segment() {
1222        // Test with many elements (multiple segments)
1223        let mut list: SegList<i32> = SegList::new();
1224
1225        for i in 0..200 {
1226            list.push(i);
1227        }
1228
1229        // Test reverse iterator across multiple segments
1230        let collected: Vec<i32> = list.iter_rev().copied().collect();
1231        let expected: Vec<i32> = (0..200).rev().collect();
1232        assert_eq!(collected, expected);
1233
1234        // Test mutable reverse iterator across multiple segments
1235        for item in list.iter_mut_rev() {
1236            *item += 1000;
1237        }
1238
1239        // Verify modification
1240        assert_eq!(list.first(), Some(&1000));
1241        assert_eq!(list.last(), Some(&1199));
1242
1243        let collected: Vec<i32> = list.iter().copied().collect();
1244        let expected: Vec<i32> = (0..200).map(|i| i + 1000).collect();
1245        assert_eq!(collected, expected);
1246    }
1247
1248    #[test]
1249    fn test_iter_rev_empty() {
1250        let list: SegList<i32> = SegList::new();
1251
1252        let collected: Vec<i32> = list.iter_rev().copied().collect();
1253        assert!(collected.is_empty());
1254
1255        let mut list_mut: SegList<i32> = SegList::new();
1256        let count = list_mut.iter_mut_rev().count();
1257        assert_eq!(count, 0);
1258    }
1259
1260    #[test]
1261    fn test_iter_rev_exact_size() {
1262        let mut list: SegList<i32> = SegList::new();
1263
1264        for i in 0..50 {
1265            list.push(i);
1266        }
1267
1268        // Test ExactSizeIterator on reverse iterator
1269        let iter = list.iter_rev();
1270        assert_eq!(iter.len(), 50);
1271
1272        let mut iter = list.iter_rev();
1273        assert_eq!(iter.len(), 50);
1274        iter.next();
1275        assert_eq!(iter.len(), 49);
1276        iter.next();
1277        assert_eq!(iter.len(), 48);
1278
1279        // Test ExactSizeIterator on mutable reverse iterator
1280        let mut iter_mut = list.iter_mut_rev();
1281        assert_eq!(iter_mut.len(), 50);
1282        iter_mut.next();
1283        assert_eq!(iter_mut.len(), 49);
1284    }
1285
1286    #[test]
1287    fn test_into_rev_single_segment() {
1288        // Test with small number of elements (single segment)
1289        let mut list: SegList<i32> = SegList::new();
1290
1291        for i in 0..10 {
1292            list.push(i);
1293        }
1294
1295        // Test into_rev - should yield elements in LIFO order
1296        let drained: Vec<i32> = list.into_rev().collect();
1297        let expected: Vec<i32> = (0..10).rev().collect();
1298        assert_eq!(drained, expected);
1299    }
1300
1301    #[test]
1302    fn test_into_rev_multi_segment() {
1303        // Test with many elements (multiple segments)
1304        let mut list: SegList<i32> = SegList::new();
1305
1306        for i in 0..200 {
1307            list.push(i);
1308        }
1309
1310        // Test into_rev - should yield elements in LIFO order across segments
1311        let drained: Vec<i32> = list.into_rev().collect();
1312        let expected: Vec<i32> = (0..200).rev().collect();
1313        assert_eq!(drained, expected);
1314    }
1315
1316    #[test]
1317    fn test_into_rev_empty() {
1318        let list: SegList<i32> = SegList::new();
1319
1320        let drained: Vec<i32> = list.into_rev().collect();
1321        assert!(drained.is_empty());
1322    }
1323
1324    #[test]
1325    fn test_into_rev_partial() {
1326        // Test partial consumption of into_rev
1327        let mut list: SegList<i32> = SegList::new();
1328
1329        for i in 0..50 {
1330            list.push(i);
1331        }
1332
1333        // Only drain half
1334        let mut drain = list.into_rev();
1335        let mut drained = Vec::new();
1336        for _ in 0..25 {
1337            if let Some(item) = drain.next() {
1338                drained.push(item);
1339            }
1340        }
1341        // Drop drain - remaining elements should be dropped properly
1342        drop(drain);
1343
1344        // Verify we got the last 25 elements in reverse order
1345        let expected: Vec<i32> = (25..50).rev().collect();
1346        assert_eq!(drained, expected);
1347    }
1348}