1use alloc::alloc::{alloc, dealloc, handle_alloc_error};
61use core::alloc::Layout;
62use core::mem::{MaybeUninit, align_of, size_of};
63use core::ptr::{NonNull, null_mut};
64
// Size in bytes of one allocation granule for segments.
// NOTE(review): 128 is used on x86_64/aarch64/arm64ec/powerpc64 — presumably
// to cover adjacent-cache-line prefetching (two 64-byte lines fetched as a
// pair); confirm against the targets' documented behavior.
#[cfg(any(
    target_arch = "x86_64",
    target_arch = "aarch64",
    target_arch = "arm64ec",
    target_arch = "powerpc64",
))]
pub const CACHE_LINE_SIZE: usize = 128;
// Fallback for all other architectures: a conventional 64-byte cache line.
#[cfg(not(any(
    target_arch = "x86_64",
    target_arch = "aarch64",
    target_arch = "arm64ec",
    target_arch = "powerpc64",
)))]
pub const CACHE_LINE_SIZE: usize = 64;
80
/// A stack-like segmented list: items live in cache-line-sized heap segments
/// linked into a circular list, so pushes never reallocate or move existing
/// items.
pub struct SegList<T> {
    // The tail segment (where pushes and pops happen). Its `next` pointer
    // wraps around to the first segment, making the segment ring circular.
    tail: NonNull<SegHeader<T>>,
    // Total number of items currently stored across all segments.
    count: usize,
}
105
106unsafe impl<T: Send> Send for SegList<T> {}
107unsafe impl<T: Send> Sync for SegList<T> {}
108
109impl<T> SegList<T> {
110 pub fn new() -> Self {
112 let mut seg = unsafe { Segment::<T>::alloc(null_mut(), null_mut(), true) };
113 let header_ptr = seg.header.as_ptr();
114 let header = seg.get_header_mut();
115 header.next = header_ptr;
117 Self { tail: seg.header, count: 0 }
118 }
119
120 #[inline(always)]
122 pub fn is_empty(&self) -> bool {
123 self.count == 0
124 }
125
126 #[inline(always)]
128 pub const fn segment_cap() -> usize {
129 Segment::<T>::base_cap()
130 }
131
132 #[inline(always)]
134 pub fn len(&self) -> usize {
135 self.count
136 }
137
138 #[inline]
140 pub fn push(&mut self, item: T) {
141 unsafe {
142 let mut tail_seg = Segment::from_raw(self.tail);
143 if tail_seg.is_full() {
144 let tail_ptr = tail_seg.header.as_ptr();
145 let cur = tail_seg.get_header_mut();
146 let new_seg = Segment::alloc(tail_ptr, cur.next, false);
148 cur.next = new_seg.header.as_ptr();
149 self.tail = new_seg.header;
150 tail_seg = new_seg;
151 }
152 tail_seg.push(item);
153 }
154 self.count += 1;
155 }
156
157 #[inline]
159 pub fn pop(&mut self) -> Option<T> {
160 if self.count == 0 {
161 return None;
162 }
163 unsafe {
164 let mut tail_seg = Segment::from_raw(self.tail);
165 let (item, is_empty) = tail_seg.pop();
166 if is_empty {
167 let cur = tail_seg.get_header_mut();
168 let first_ptr = cur.next;
169 let cur_prev = cur.prev;
170 if self.tail.as_ptr() != first_ptr && !cur_prev.is_null() {
171 self.tail = NonNull::new_unchecked(cur_prev);
173 (*self.tail.as_ptr()).next = first_ptr;
174 tail_seg.dealloc();
175 }
176 }
178 self.count -= 1;
179 Some(item)
180 }
181 }
182
183 #[inline(always)]
185 fn break_first_node(&mut self) -> Segment<T> {
186 let tail_header = unsafe { self.tail.as_mut() };
187 let first = tail_header.next;
188 tail_header.next = null_mut();
189 unsafe { Segment::from_raw(NonNull::new_unchecked(first)) }
190 }
191
192 #[inline(always)]
193 fn first_ptr(&self) -> NonNull<SegHeader<T>> {
194 unsafe {
197 let tail_header = self.tail.as_ref();
198 let first = tail_header.next;
199 NonNull::new_unchecked(first)
200 }
201 }
202
203 #[inline]
205 pub fn iter(&self) -> SegListIter<'_, T> {
206 let first_seg = unsafe { Segment::from_raw(self.first_ptr()) };
207 SegListIter {
208 cur: first_seg,
209 cur_idx: 0,
210 remaining: self.count,
211 _marker: core::marker::PhantomData,
212 }
213 }
214
215 #[inline]
217 pub fn iter_mut(&mut self) -> SegListIterMut<'_, T> {
218 let first_seg = unsafe { Segment::from_raw(self.first_ptr()) };
219 SegListIterMut {
220 cur: first_seg,
221 cur_idx: 0,
222 remaining: self.count,
223 _marker: core::marker::PhantomData,
224 }
225 }
226
227 pub fn drain(mut self) -> SegListDrain<T> {
229 let first = self.break_first_node();
231 core::mem::forget(self);
233 SegListDrain { cur: Some(first), cur_idx: 0 }
234 }
235
236 #[inline]
238 pub fn first(&self) -> Option<&T> {
239 if self.count == 0 {
240 return None;
241 }
242 unsafe {
245 let first_seg = Segment::from_raw(self.first_ptr());
246 Some((*first_seg.item_ptr(0)).assume_init_ref())
247 }
248 }
249
250 #[inline]
252 pub fn first_mut(&self) -> Option<&T> {
253 if self.count == 0 {
254 return None;
255 }
256 unsafe {
259 let first_seg = Segment::from_raw(self.first_ptr());
260 Some((*first_seg.item_ptr(0)).assume_init_mut())
261 }
262 }
263
264 #[inline]
266 pub fn last(&self) -> Option<&T> {
267 unsafe {
269 let tail_seg = Segment::from_raw(self.tail);
270 let header = tail_seg.get_header();
271 if header.count == 0 {
272 return None;
273 }
274 let idx = (header.count - 1) as usize;
275 Some((*tail_seg.item_ptr(idx)).assume_init_ref())
276 }
277 }
278
279 #[inline]
281 pub fn last_mut(&mut self) -> Option<&mut T> {
282 unsafe {
284 let tail_seg = Segment::from_raw(self.tail);
285 let header = tail_seg.get_header();
286 if header.count == 0 {
287 return None;
288 }
289 let idx = (header.count - 1) as usize;
290 Some((*tail_seg.item_ptr(idx)).assume_init_mut())
291 }
292 }
293
294 #[inline]
296 pub fn clear(&mut self) {
297 while self.pop().is_some() {}
298 }
299}
300
301impl<T> Default for SegList<T> {
302 fn default() -> Self {
303 Self::new()
304 }
305}
306
307impl<T> Drop for SegList<T> {
308 fn drop(&mut self) {
309 let mut cur = self.break_first_node();
311 loop {
312 let next = cur.get_header().next;
314 unsafe { cur.dealloc_with_items() };
315 if next.is_null() {
316 break;
317 }
318 cur = unsafe { Segment::from_raw(NonNull::new_unchecked(next)) };
319 }
320 }
321}
322
323impl<T> IntoIterator for SegList<T> {
324 type Item = T;
325 type IntoIter = SegListDrain<T>;
326
327 #[inline(always)]
328 fn into_iter(self) -> Self::IntoIter {
329 self.drain()
330 }
331}
332
333impl<T: core::fmt::Debug> core::fmt::Debug for SegList<T> {
334 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
335 f.debug_struct("SegList").field("len", &self.len()).finish()
336 }
337}
338
/// Header placed at the start of every segment allocation; the item slots
/// (`MaybeUninit<T>`) follow it in the same allocation at
/// `Segment::data_offset()`.
#[repr(C)]
struct SegHeader<T> {
    // Number of initialized items currently stored in this segment.
    count: u32,
    // Total item slots in this segment; also used by `Segment::dealloc` to
    // decide which layout (base vs. large) the allocation was made with.
    cap: u32,
    // Previous segment in push order (null for the first segment).
    prev: *mut SegHeader<T>,
    // Next segment; for the tail this wraps around to the first segment
    // while the list is intact (nulled for the drop/drain walks).
    next: *mut SegHeader<T>,
    // Marks logical ownership of the `T`s stored after the header.
    _marker: core::marker::PhantomData<T>,
}
351
/// Thin handle to one segment allocation, addressed by its header pointer.
/// Does not own the allocation; freeing is explicit via `dealloc*`.
struct Segment<T> {
    // Pointer to the segment's header; item storage follows in the same allocation.
    header: NonNull<SegHeader<T>>,
}
357
358impl<T> Segment<T> {
359 const DATA_OFFSET: usize = Self::calc_data_offset();
361
362 const BASE_LAYOUT: (usize, Layout) = Self::calc_layout_const(CACHE_LINE_SIZE);
365
366 const LARGE_LAYOUT: (usize, Layout) = Self::calc_layout_const(CACHE_LINE_SIZE * 2);
369
370 const fn calc_data_offset() -> usize {
372 let mut data_offset = size_of::<SegHeader<T>>();
373 let t_size = size_of::<T>();
374 let t_align = align_of::<MaybeUninit<T>>();
375
376 if t_size != 0 {
377 data_offset = (data_offset + t_align - 1) & !(t_align - 1);
378 }
379 data_offset
380 }
381
382 const fn calc_layout_const(cache_line: usize) -> (usize, Layout) {
384 let t_size = size_of::<T>();
385 let data_offset = Self::DATA_OFFSET;
386 let capacity;
387 let final_alloc_size;
388 let final_align;
389
390 if t_size == 0 {
391 capacity = 1024;
393 final_alloc_size = cache_line;
394 final_align = cache_line;
395 } else {
396 let min_elements = 2;
397 let min_required_size = data_offset + (t_size * min_elements);
398 let alloc_size = (min_required_size + cache_line - 1) & !(cache_line - 1);
399 final_align = if cache_line > align_of::<MaybeUninit<T>>() {
400 cache_line
401 } else {
402 align_of::<MaybeUninit<T>>()
403 };
404 final_alloc_size = (alloc_size + final_align - 1) & !(final_align - 1);
405 capacity = (final_alloc_size - data_offset) / t_size;
406 assert!(capacity >= min_elements);
408 }
409
410 match Layout::from_size_align(final_alloc_size, final_align) {
411 Ok(l) => (capacity, l),
412 Err(_) => panic!("Invalid layout"),
413 }
414 }
415
416 #[inline(always)]
418 const fn base_cap() -> usize {
419 Self::BASE_LAYOUT.0
420 }
421
422 #[inline(always)]
424 const fn large_cap() -> usize {
425 Self::LARGE_LAYOUT.0
426 }
427
428 #[inline(always)]
430 const fn data_offset() -> usize {
431 Self::DATA_OFFSET
432 }
433
434 #[inline]
437 unsafe fn alloc(prev: *mut SegHeader<T>, next: *mut SegHeader<T>, is_first: bool) -> Self {
438 let (cap, layout) = if is_first {
439 (Self::base_cap() as u32, Self::BASE_LAYOUT.1)
440 } else {
441 (Self::large_cap() as u32, Self::LARGE_LAYOUT.1)
442 };
443 let ptr: *mut u8 = unsafe { alloc(layout) };
444 if ptr.is_null() {
445 handle_alloc_error(layout);
446 }
447 unsafe {
448 let p = NonNull::new_unchecked(ptr as *mut SegHeader<T>);
449 let header = p.as_ptr();
450 (*header).count = 0;
452 (*header).cap = cap;
453 (*header).prev = prev;
454 (*header).next = next;
455 Self::from_raw(p)
456 }
457 }
458
459 #[inline(always)]
460 unsafe fn dealloc_with_items(&mut self) {
461 unsafe {
462 if core::mem::needs_drop::<T>() {
463 for i in 0..self.len() {
464 (*self.item_ptr(i)).assume_init_drop();
465 }
466 }
467 self.dealloc();
468 }
469 }
470
471 #[inline(always)]
472 unsafe fn dealloc(&mut self) {
473 unsafe {
475 let cap = (*self.header.as_ptr()).cap as usize;
476 let layout =
477 if cap == Self::base_cap() { Self::BASE_LAYOUT.1 } else { Self::LARGE_LAYOUT.1 };
478 dealloc(self.header.as_ptr() as *mut u8, layout);
479 }
480 }
481
482 #[inline(always)]
483 unsafe fn from_raw(header: NonNull<SegHeader<T>>) -> Self {
484 Self { header }
485 }
486
487 #[inline(always)]
489 fn len(&self) -> usize {
490 unsafe { (*self.header.as_ptr()).count as usize }
491 }
492
493 #[inline(always)]
494 fn get_header(&self) -> &SegHeader<T> {
495 unsafe { self.header.as_ref() }
496 }
497
498 #[inline(always)]
499 fn get_header_mut(&mut self) -> &mut SegHeader<T> {
500 unsafe { self.header.as_mut() }
501 }
502
503 #[inline(always)]
505 fn is_empty(&self) -> bool {
506 self.len() == 0
507 }
508
509 #[inline(always)]
511 fn is_full(&self) -> bool {
512 let header = self.get_header();
513 header.count >= header.cap
514 }
515
516 #[inline]
518 fn item_ptr(&self, index: usize) -> *mut MaybeUninit<T> {
519 unsafe {
520 let items =
521 (self.header.as_ptr() as *mut u8).add(Self::data_offset()) as *mut MaybeUninit<T>;
522 items.add(index)
523 }
524 }
525
526 #[inline]
528 fn push(&mut self, item: T) {
529 debug_assert!(!self.is_full());
530 let idx = self.get_header().count as usize;
531 unsafe {
532 (*self.item_ptr(idx)).write(item);
533 }
534 self.get_header_mut().count = (idx + 1) as u32;
535 }
536
537 #[inline]
539 fn pop(&mut self) -> (T, bool) {
540 debug_assert!(!self.is_empty());
541 let idx = self.get_header().count - 1;
542 let item = unsafe { (*self.item_ptr(idx as usize)).assume_init_read() };
543 self.get_header_mut().count = idx;
544 (item, idx == 0)
545 }
546}
547
/// Shared-reference iterator over a [`SegList`], oldest item first.
pub struct SegListIter<'a, T> {
    // Segment currently being walked.
    cur: Segment<T>,
    // Index of the next slot to yield within `cur`.
    cur_idx: usize,
    // Items left to yield across all segments; the sole termination signal.
    remaining: usize,
    // Ties the iterator's lifetime to the borrowed list.
    _marker: core::marker::PhantomData<&'a T>,
}
555
556impl<'a, T> Iterator for SegListIter<'a, T> {
557 type Item = &'a T;
558
559 #[inline]
560 fn next(&mut self) -> Option<Self::Item> {
561 if self.remaining == 0 {
562 return None;
563 }
564 let cur_header = self.cur.get_header();
565 self.remaining -= 1;
566 let idx = if self.cur_idx >= cur_header.count as usize {
567 let next = cur_header.next;
568 self.cur = unsafe { Segment::from_raw(NonNull::new_unchecked(next)) };
570 self.cur_idx = 1;
571 0
572 } else {
573 let _idx = self.cur_idx;
574 self.cur_idx = _idx + 1;
575 _idx
576 };
577 return Some(unsafe { (*self.cur.item_ptr(idx)).assume_init_ref() });
578 }
579}
580
/// Mutable-reference iterator over a [`SegList`], oldest item first.
pub struct SegListIterMut<'a, T> {
    // Segment currently being walked.
    cur: Segment<T>,
    // Index of the next slot to yield within `cur`.
    cur_idx: usize,
    // Items left to yield across all segments; the sole termination signal.
    remaining: usize,
    // Ties the iterator's lifetime to the exclusively borrowed list.
    _marker: core::marker::PhantomData<&'a mut T>,
}
588
589impl<'a, T> Iterator for SegListIterMut<'a, T> {
590 type Item = &'a mut T;
591
592 #[inline]
593 fn next(&mut self) -> Option<Self::Item> {
594 if self.remaining == 0 {
595 return None;
596 }
597 let cur_header = self.cur.get_header();
598 self.remaining -= 1;
599 let idx = if self.cur_idx >= cur_header.count as usize {
600 let next = cur_header.next;
601 self.cur = unsafe { Segment::from_raw(NonNull::new_unchecked(next)) };
603 self.cur_idx = 1;
604 0
605 } else {
606 let _idx = self.cur_idx;
607 self.cur_idx += 1;
608 _idx
609 };
610 return Some(unsafe { (*self.cur.item_ptr(idx)).assume_init_mut() });
611 }
612}
613
/// Owning iterator produced by [`SegList::drain`] / `into_iter`; yields items
/// oldest-first and frees each segment once it has been fully consumed.
pub struct SegListDrain<T> {
    // Segment currently being drained; `None` once every segment is freed.
    cur: Option<Segment<T>>,
    // Index of the next slot to read within `cur`.
    cur_idx: usize,
}
620
621impl<T> Iterator for SegListDrain<T> {
622 type Item = T;
623
624 #[inline]
625 fn next(&mut self) -> Option<Self::Item> {
626 let cur_seg = self.cur.as_mut()?;
627 unsafe {
628 let item = (*cur_seg.item_ptr(self.cur_idx)).assume_init_read();
629 let next_idx = self.cur_idx + 1;
630 let header = cur_seg.get_header();
631 if next_idx >= header.count as usize {
633 let next = header.next;
634 cur_seg.dealloc();
635 if next.is_null() {
636 self.cur = None;
637 } else {
638 self.cur = Some(Segment::from_raw(NonNull::new_unchecked(next)));
639 self.cur_idx = 0;
640 }
641 } else {
642 self.cur_idx = next_idx;
643 }
644 Some(item)
645 }
646 }
647}
648
649impl<T> Drop for SegListDrain<T> {
650 fn drop(&mut self) {
651 let mut next: *mut SegHeader<T>;
652 if let Some(mut cur) = self.cur.take() {
653 unsafe {
654 let header = cur.get_header();
656 next = header.next;
657 if core::mem::needs_drop::<T>() {
659 for i in self.cur_idx..header.count as usize {
660 (*cur.item_ptr(i)).assume_init_drop();
661 }
662 }
663 cur.dealloc();
664 while !next.is_null() {
665 cur = Segment::from_raw(NonNull::new_unchecked(next));
666 let header = cur.get_header();
667 next = header.next;
668 cur.dealloc_with_items();
669 }
670 }
671 }
672 }
673}
674
#[cfg(test)]
mod tests {
    // Covers push/pop across segments, iteration, draining (full and
    // partial), drop accounting via a counter type, and layout sanity.
    use super::*;
    use core::sync::atomic::{AtomicUsize, Ordering};

    // Global counter bumped by every `DropTracker` drop; tests reset it and
    // compare against the expected number of destructions.
    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    #[derive(Debug)]
    struct DropTracker(i32);

    impl Drop for DropTracker {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    fn reset_drop_count() {
        DROP_COUNT.store(0, Ordering::SeqCst);
    }

    fn get_drop_count() -> usize {
        DROP_COUNT.load(Ordering::SeqCst)
    }

    // 100 pushes span several segments; pops must come back in LIFO order.
    #[test]
    fn test_multiple_segments() {
        let mut list: SegList<i32> = SegList::new();
        if CACHE_LINE_SIZE == 128 {
            assert_eq!(Segment::<i32>::base_cap(), 26);
        }

        for i in 0..100 {
            list.push(i);
        }

        assert_eq!(list.len(), 100);

        for i in (0..100).rev() {
            assert_eq!(list.pop(), Some(i));
        }

        assert_eq!(list.pop(), None);
    }

    // iter/iter_mut over a list that fits in a single segment.
    #[test]
    fn test_iter_single_segment() {
        let mut list: SegList<i32> = SegList::new();

        for i in 0..10 {
            list.push(i);
        }
        assert_eq!(list.first(), Some(&0));
        assert_eq!(list.last(), Some(&9));

        let collected: Vec<i32> = list.iter().copied().collect();
        assert_eq!(collected, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);

        for item in list.iter_mut() {
            *item *= 2;
        }
        assert_eq!(list.first(), Some(&0));
        assert_eq!(list.last(), Some(&18));

        let collected: Vec<i32> = list.iter().copied().collect();
        assert_eq!(collected, vec![0, 2, 4, 6, 8, 10, 12, 14, 16, 18]);

        for i in (0..10).rev() {
            assert_eq!(list.pop(), Some(i * 2));
        }
        assert_eq!(list.pop(), None);
        assert!(list.is_empty());
    }

    // iter/iter_mut preserve order across segment boundaries.
    #[test]
    fn test_iter_multi_segment() {
        let mut list: SegList<i32> = SegList::new();

        for i in 0..200 {
            list.push(i * 10);
        }
        assert_eq!(list.first(), Some(&0));
        assert_eq!(list.last(), Some(&1990));

        let collected: Vec<i32> = list.iter().copied().collect();
        let expected: Vec<i32> = (0..200).map(|i| i * 10).collect();
        assert_eq!(collected, expected);

        for item in list.iter_mut() {
            *item += 1;
        }
        assert_eq!(list.first(), Some(&1));
        assert_eq!(list.last(), Some(&1991));

        let collected: Vec<i32> = list.iter().copied().collect();
        let expected: Vec<i32> = (0..200).map(|i| i * 10 + 1).collect();
        assert_eq!(collected, expected);

        for i in (0..200).rev() {
            assert_eq!(list.pop(), Some(i * 10 + 1));
        }
        assert_eq!(list.pop(), None);
        assert!(list.is_empty());
    }

    // drain() in three scenarios: full consumption, partial consumption
    // (drain iterator dropped mid-segment), and consumption stopping exactly
    // at a segment boundary. Drop counts must equal the number pushed.
    #[test]
    fn test_drain() {
        let cap = Segment::<DropTracker>::base_cap();

        {
            reset_drop_count();
            {
                let mut list: SegList<DropTracker> = SegList::new();
                for i in 0..5 {
                    list.push(DropTracker(i));
                }
                assert!(list.len() < cap);

                let drained: Vec<i32> = list.drain().map(|d| d.0).collect();
                assert_eq!(drained, vec![0, 1, 2, 3, 4]);
            }
            assert_eq!(get_drop_count(), 5);
        }

        {
            reset_drop_count();
            {
                let mut list: SegList<DropTracker> = SegList::new();
                let total = cap * 2 + 5;
                for i in 0..total {
                    list.push(DropTracker(i as i32));
                }

                let drain_count = cap / 2;
                let mut drain_iter = list.drain();
                for i in 0..drain_count {
                    assert_eq!(drain_iter.next().unwrap().0, i as i32);
                }
                drop(drain_iter);
            }
            assert_eq!(get_drop_count(), cap * 2 + 5);
        }

        {
            reset_drop_count();
            {
                let mut list: SegList<DropTracker> = SegList::new();
                let total = cap * 2 + 3;
                for i in 0..total {
                    list.push(DropTracker(i as i32));
                }

                let mut drain_iter = list.drain();
                for i in 0..cap {
                    assert_eq!(drain_iter.next().unwrap().0, i as i32);
                }
                drop(drain_iter);
            }
            assert_eq!(get_drop_count(), cap * 2 + 3);
        }
    }

    // Dropping a one-segment list drops every stored item exactly once.
    #[test]
    fn test_drop_single_segment() {
        reset_drop_count();
        {
            let mut list: SegList<DropTracker> = SegList::new();
            let cap = Segment::<DropTracker>::base_cap();

            for i in 0..5 {
                list.push(DropTracker(i));
            }
            assert!(list.len() < cap);

        }

        assert_eq!(get_drop_count(), 5);
    }

    // Dropping a multi-segment list drops every stored item exactly once.
    #[test]
    fn test_drop_multi_segment() {
        let cap = Segment::<DropTracker>::base_cap();
        reset_drop_count();
        {
            let mut list: SegList<DropTracker> = SegList::new();

            let total = cap * 2 + 10;
            for i in 0..total {
                list.push(DropTracker(i as i32));
            }
        }
        assert_eq!(get_drop_count(), cap * 2 + 10);
    }

    // clear() empties the list and leaves it usable.
    #[test]
    fn test_clear() {
        let mut list: SegList<i32> = SegList::new();

        for i in 0..50 {
            list.push(i);
        }

        list.clear();

        assert!(list.is_empty());
        assert_eq!(list.len(), 0);
        assert_eq!(list.pop(), None);
    }

    // 128-byte item type, exercising the large-`T` layout paths.
    #[derive(Debug, Clone, Copy, PartialEq)]
    struct LargeStruct {
        data: [u64; 16],
    }

    impl LargeStruct {
        fn new(val: u64) -> Self {
            Self { data: [val; 16] }
        }
    }

    // Prints layout diagnostics for several item sizes; only asserts the
    // header size (2 x u32 + 2 pointers = 24 bytes on 64-bit targets).
    #[test]
    fn test_size() {
        assert_eq!(size_of::<SegHeader::<LargeStruct>>(), 24);
        let data_offset = Segment::<LargeStruct>::data_offset();
        let base_cap = Segment::<LargeStruct>::base_cap();
        let large_cap = Segment::<LargeStruct>::large_cap();
        let base_layout = Segment::<LargeStruct>::BASE_LAYOUT.1;
        let large_layout = Segment::<LargeStruct>::LARGE_LAYOUT.1;
        println!(
            "LargeStruct: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
            data_offset,
            base_cap,
            base_layout.size(),
            base_layout.align(),
            large_cap,
            large_layout.size(),
            large_layout.align()
        );
        let data_offset = Segment::<u64>::data_offset();
        let base_cap = Segment::<u64>::base_cap();
        let large_cap = Segment::<u64>::large_cap();
        let base_layout = Segment::<u64>::BASE_LAYOUT.1;
        let large_layout = Segment::<u64>::LARGE_LAYOUT.1;
        println!(
            "u64: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
            data_offset,
            base_cap,
            base_layout.size(),
            base_layout.align(),
            large_cap,
            large_layout.size(),
            large_layout.align()
        );
        let data_offset = Segment::<u32>::data_offset();
        let base_cap = Segment::<u32>::base_cap();
        let large_cap = Segment::<u32>::large_cap();
        let base_layout = Segment::<u32>::BASE_LAYOUT.1;
        let large_layout = Segment::<u32>::LARGE_LAYOUT.1;
        println!(
            "u32: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
            data_offset,
            base_cap,
            base_layout.size(),
            base_layout.align(),
            large_cap,
            large_layout.size(),
            large_layout.align()
        );
        let data_offset = Segment::<u16>::data_offset();
        let base_cap = Segment::<u16>::base_cap();
        let large_cap = Segment::<u16>::large_cap();
        let base_layout = Segment::<u16>::BASE_LAYOUT.1;
        let large_layout = Segment::<u16>::LARGE_LAYOUT.1;
        println!(
            "u16: offset={}, base(cap={} size={} align={}), large(cap={} size={} align={})",
            data_offset,
            base_cap,
            base_layout.size(),
            base_layout.align(),
            large_cap,
            large_layout.size(),
            large_layout.align()
        );
    }

    // LIFO push/pop with a 128-byte item type.
    #[test]
    fn test_large_type_push_pop() {
        let mut list: SegList<LargeStruct> = SegList::new();
        for i in 0..50 {
            list.push(LargeStruct::new(i));
        }
        assert_eq!(list.len(), 50);

        for i in (0..50).rev() {
            let val = list.pop().unwrap();
            assert_eq!(val.data[0], i);
            assert_eq!(val.data[7], i);
        }
        assert!(list.is_empty());
        assert_eq!(list.pop(), None);
    }

    // Shared iteration over large items.
    #[test]
    fn test_large_type_iter() {
        let mut list: SegList<LargeStruct> = SegList::new();

        for i in 0..30 {
            list.push(LargeStruct::new(i * 10));
        }

        let collected: Vec<u64> = list.iter().map(|s| s.data[0]).collect();
        let expected: Vec<u64> = (0..30).map(|i| i * 10).collect();
        assert_eq!(collected, expected);
    }

    // Mutable iteration over large items.
    #[test]
    fn test_large_type_iter_mut() {
        let mut list: SegList<LargeStruct> = SegList::new();

        for i in 0..20 {
            list.push(LargeStruct::new(i));
        }

        for item in list.iter_mut() {
            item.data[0] *= 2;
        }

        let collected: Vec<u64> = list.iter().map(|s| s.data[0]).collect();
        let expected: Vec<u64> = (0..20).map(|i| i * 2).collect();
        assert_eq!(collected, expected);
    }

    // Draining large items preserves insertion order.
    #[test]
    fn test_large_type_drain() {
        let mut list: SegList<LargeStruct> = SegList::new();

        for i in 0..40 {
            list.push(LargeStruct::new(i));
        }

        let drained: Vec<u64> = list.drain().map(|s| s.data[0]).collect();
        let expected: Vec<u64> = (0..40).collect();
        assert_eq!(drained, expected);
    }

    // clear() with large items leaves an empty, usable list.
    #[test]
    fn test_large_type_clear() {
        let mut list: SegList<LargeStruct> = SegList::new();

        for i in 0..60 {
            list.push(LargeStruct::new(i));
        }
        assert_eq!(list.len(), 60);

        list.clear();
        assert!(list.is_empty());
        assert_eq!(list.len(), 0);
        assert_eq!(list.pop(), None);
    }
}