// spacetimedb_primitives/col_list.rs
1#![forbid(unsafe_op_in_unsafe_fn)]
2
3extern crate alloc;
4
5use crate::ColId;
6use alloc::alloc::{alloc, dealloc, handle_alloc_error, realloc};
7use core::{
8    alloc::Layout,
9    cmp::Ordering,
10    fmt,
11    hash::{Hash, Hasher},
12    iter,
13    mem::{size_of, ManuallyDrop},
14    ops::{Deref, DerefMut},
15    ptr::NonNull,
16    slice::{from_raw_parts, from_raw_parts_mut},
17};
18use either::Either;
19use itertools::Itertools;
20
/// Constructs a `ColList` like so `col_list![0, 2]`.
///
/// Mostly provided for testing.
#[macro_export]
macro_rules! col_list {
    // Zero or more expressions convertible to `ColId`, with an optional trailing comma.
    // Delegates to `ColList::from([...])`, so ordering/duplication semantics
    // match pushing the elements one by one.
    ($($elem:expr),* $(,)?) => {{
        $crate::ColList::from([$($elem),*])
    }};
}
30
/// This represents a list of [`ColId`]s
/// but packed into a `u64` in a way that takes advantage of the fact that
/// in almost all cases, we won't store a `ColId` larger than 62.
/// In the rare case that we store larger ids, we fall back to a thin vec approach.
///
/// We also fall back to a thin vec if the ids stored are not in sorted order, from low to high,
/// or if the list contains duplicates.
///
/// If you want a set of columns, use [`ColSet`] instead. It is more likely to be compressed,
/// and so is a better choice if you don't require ordering information.
#[repr(C)]
pub union ColList {
    /// Used to determine whether the list is stored inline or not.
    /// The active variant is discriminated by the lowest bit:
    /// `1` means `inline` is active, `0` means `heap` is (see `is_inline`).
    check: usize,
    /// The list is stored inline as a bitset.
    /// The lowest bit is the inline marker (always `1` when active);
    /// bit `c + 1` is set iff `ColId(c)` is in the list.
    inline: ColListInline,
    /// A heap allocated version of the list.
    /// The wrapped pointer is 2-byte aligned, so its lowest bit is always `0`,
    /// which is what makes the `check` discrimination sound.
    heap: ManuallyDrop<ColListVec>,
}
50
// SAFETY: The data is owned by `ColList` so this is OK.
// There is no interior mutability, so `&ColList` only permits reads.
unsafe impl Sync for ColList {}
// SAFETY: The data is owned by `ColList` so this is OK.
// The heap allocation (if any) is uniquely owned and freed exactly once on drop.
unsafe impl Send for ColList {}
55
56impl<C: Into<ColId>> From<C> for ColList {
57    fn from(value: C) -> Self {
58        Self::new(value.into())
59    }
60}
61
62impl<C: Into<ColId>, const N: usize> From<[C; N]> for ColList {
63    fn from(cols: [C; N]) -> Self {
64        cols.map(|c| c.into()).into_iter().collect()
65    }
66}
67
68impl<C: Into<ColId>> FromIterator<C> for ColList {
69    fn from_iter<I: IntoIterator<Item = C>>(iter: I) -> Self {
70        let iter = iter.into_iter();
71        let (lower_bound, _) = iter.size_hint();
72        let mut list = Self::with_capacity(lower_bound as u16);
73        list.extend(iter);
74        list
75    }
76}
77
78impl<C: Into<ColId>> Extend<C> for ColList {
79    fn extend<T: IntoIterator<Item = C>>(&mut self, iter: T) {
80        let iter = iter.into_iter();
81        for col in iter {
82            self.push(col.into());
83        }
84    }
85}
86
87impl Default for ColList {
88    fn default() -> Self {
89        Self::with_capacity(0)
90    }
91}
92
impl ColList {
    /// Returns an empty list.
    ///
    /// Does not allocate; the empty list is always inline.
    pub fn empty() -> Self {
        Self::from_inline(0)
    }

    /// Returns a list with a single column.
    /// As long as `col` is below `63`, this will not allocate.
    pub fn new(col: ColId) -> Self {
        let mut list = Self::from_inline(0);
        // A single element trivially preserves set order.
        list.push_inner(col, true);
        list
    }

    /// Returns an empty list with a capacity to hold `cap` elements.
    ///
    /// For `cap < 63` this stays inline (capacity is implicit in the bitset);
    /// otherwise a heap allocation of `cap` elements is made up front.
    pub fn with_capacity(cap: u16) -> Self {
        // We speculate that all elements < `Self::FIRST_HEAP_COL`.
        if cap < Self::FIRST_HEAP_COL_U16 {
            Self::from_inline(0)
        } else {
            Self::from_heap(ColListVec::with_capacity(cap))
        }
    }

    /// Constructs a list from a `u64` bitset
    /// where the highest bit is unset.
    ///
    /// Panics in debug mode if the highest bit is set.
    fn from_inline(list: u64) -> Self {
        // The highest bit must be free; the shift below would otherwise lose it.
        debug_assert_eq!(list & (1 << Self::FIRST_HEAP_COL), 0);
        // (1) Move the whole inline bitset by one bit to the left.
        // Mark the now-zero lowest bit so we know the list is inline.
        let inline = ColListInline(list << 1 | 1);
        // SAFETY: Lowest bit is set, so this will be interpreted as inline and not a pointer.
        let ret = Self { inline };
        debug_assert!(ret.is_inline());
        ret
    }

    /// Constructs a list in heap form from `vec`.
    fn from_heap(vec: ColListVec) -> Self {
        // `ManuallyDrop` so the union can hold it; `ColList::drop` frees it.
        let heap = ManuallyDrop::new(vec);
        Self { heap }
    }

    /// Returns `head` if that is the only element.
    pub fn as_singleton(&self) -> Option<ColId> {
        // Take the first element and require that there is no second one.
        let mut iter = self.iter();
        match (iter.next(), iter.next()) {
            (h @ Some(_), None) => h,
            _ => None,
        }
    }

    /// Returns the head of the list, if any.
    pub fn head(&self) -> Option<ColId> {
        self.iter().next()
    }

    /// Returns the last of the list, if any.
    pub fn last(&self) -> Option<ColId> {
        match self.as_inline() {
            // Inline: position of the highest set bit.
            Ok(inline) => inline.last(),
            // Heap: last element of the slice (insertion order).
            Err(heap) => heap.last().copied(),
        }
    }

    /// Returns whether `needle` is contained in the list.
    ///
    /// This can be faster than using `list.iter().any(|c| c == needle)`.
    pub fn contains(&self, needle: ColId) -> bool {
        match self.as_inline() {
            // Inline: O(1) bit test.
            Ok(inline) => inline.contains(needle),
            // Heap: linear scan — the heap form need not be sorted.
            Err(heap) => heap.contains(&needle),
        }
    }

    /// Returns an iterator over all the columns in this list.
    ///
    /// Inline lists iterate in ascending id order;
    /// heap lists iterate in insertion order.
    pub fn iter(&self) -> impl '_ + Clone + Iterator<Item = ColId> {
        match self.as_inline() {
            Ok(inline) => Either::Left(inline.iter()),
            Err(heap) => Either::Right(heap.iter().copied()),
        }
    }

    /// Convert to a `Box<[u16]>`.
    pub fn to_u16_vec(&self) -> alloc::boxed::Box<[u16]> {
        self.iter().map(u16::from).collect()
    }

    /// Returns the length of the list.
    pub fn len(&self) -> u16 {
        match self.as_inline() {
            // Inline: one element per set bit (the marker bit excluded).
            Ok(inline) => inline.len(),
            Err(heap) => heap.len(),
        }
    }

    /// Returns whether the list is empty.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Push `col` onto the list.
    ///
    /// If `col >= 63` or `col <= last_col`, the list will become heap allocated if not already.
    pub fn push(&mut self, col: ColId) {
        // The inline bitset can only represent strictly increasing pushes.
        self.push_inner(col, self.last().is_none_or(|l| l < col));
    }

    /// Sort and deduplicate the list.
    /// If the list is already sorted and deduplicated, does nothing.
    /// This will typically result in an inline list unless there are large `ColId`s in play.
    fn sort_dedup(&mut self) {
        // Inline lists are sorted and deduplicated by construction; only heap lists need work.
        if let Err(heap) = self.as_inline_mut() {
            heap.sort();

            // Don't reallocate if the list is already sorted and deduplicated.
            let is_deduped = is_sorted_and_deduped(heap);
            // Re-collecting also lets the list switch back to the inline form
            // when every id fits in the bitset.
            let wants_inline = heap.last().unwrap_or(&ColId(0)).0 < Self::FIRST_HEAP_COL_U16;
            if !is_deduped || wants_inline {
                *self = Self::from_iter(heap.iter().copied().dedup());
            }
        }
    }

    /// Push `col` onto the list.
    ///
    /// If `col >= 63` or `!preserves_set_order`,
    /// the list will become heap allocated if not already.
    #[inline]
    fn push_inner(&mut self, col: ColId, preserves_set_order: bool) {
        let val = u16::from(col) as u64;
        match (val < Self::FIRST_HEAP_COL && preserves_set_order, self.as_inline_mut()) {
            // Stays inline: set bit `val + 1` (bit 0 is the inline marker).
            (true, Ok(inline)) => inline.0 |= 1 << (val + 1),
            // Converts the list to its non-inline heap form.
            // This is unlikely to happen.
            (false, Ok(inline)) => *self = Self::from_heap(inline.heapify_and_push(col)),
            // Already heap allocated; just append.
            (_, Err(heap)) => heap.push(col),
        }
    }

    /// The first `ColId` that would make the list heap allocated.
    ///
    /// One bit of the `u64` serves as the inline marker,
    /// leaving 63 usable bits, i.e. ids `0..=62` fit inline.
    const FIRST_HEAP_COL: u64 = size_of::<u64>() as u64 * 8 - 1;

    /// The first `ColId` that would make the list heap allocated.
    const FIRST_HEAP_COL_U16: u16 = Self::FIRST_HEAP_COL as u16;

    /// Returns the list either as inline or heap based.
    #[inline]
    fn as_inline(&self) -> Result<&ColListInline, &ManuallyDrop<ColListVec>> {
        if self.is_inline() {
            // SAFETY: confirmed that it is inline so this field is active.
            Ok(unsafe { &self.inline })
        } else {
            // SAFETY: confirmed it's not, so `heap` is active instead.
            Err(unsafe { &self.heap })
        }
    }

    /// Returns the list either as inline or heap based.
    #[inline]
    fn as_inline_mut(&mut self) -> Result<&mut ColListInline, &mut ManuallyDrop<ColListVec>> {
        if self.is_inline() {
            // SAFETY: confirmed that it is inline so this field is active.
            Ok(unsafe { &mut self.inline })
        } else {
            // SAFETY: confirmed it's not, so `heap` is active instead.
            Err(unsafe { &mut self.heap })
        }
    }

    /// Returns whether the list is stored inline.
    #[inline]
    fn is_inline(&self) -> bool {
        // Check whether the lowest bit has been marked.
        // This bit is unused by the heap case as the pointer must be aligned for `u16`.
        // That is, we know that if the `self.heap` variant is active,
        // then `self.heap.addr() % align_of::<u16> == 0`.
        // So if `self.check % align_of::<u16> == 1`, as checked below,
        // we know it's the inline case and not the heap case.

        // SAFETY: Even when `inline`, and on a < 64-bit target,
        // we can treat the union as a `usize` to check the lowest bit.
        let addr = unsafe { self.check };
        addr & 1 != 0
    }
}
280
#[cfg(feature = "memory-usage")]
impl spacetimedb_memory_usage::MemoryUsage for ColList {
    fn heap_usage(&self) -> usize {
        match self.as_inline() {
            // Inline lists live entirely inside `self`; no heap memory is used.
            Ok(_) => 0,
            // NOTE(review): this reports the capacity in *elements*, not bytes.
            // The actual allocation is `(capacity + 2) * size_of::<u16>()` bytes
            // (len + capacity header words plus the data) — confirm whether
            // `MemoryUsage::heap_usage` expects bytes here.
            Err(heap) => heap.capacity() as usize,
        }
    }
}
290
impl Drop for ColList {
    fn drop(&mut self) {
        // Inline lists own no resources; only the heap form needs freeing.
        if let Err(heap) = self.as_inline_mut() {
            // SAFETY: Only called once, so we will not have use-after-free or double-free.
            unsafe { ManuallyDrop::drop(heap) };
        }
    }
}
299
impl Clone for ColList {
    fn clone(&self) -> Self {
        match self.as_inline() {
            // Inline: a plain bit-copy of the bitset suffices.
            Ok(inline) => Self { inline: *inline },
            // Heap: deep-clones the underlying allocation.
            Err(heap) => Self { heap: heap.clone() },
        }
    }
}

impl Eq for ColList {}
impl PartialEq for ColList {
    fn eq(&self, other: &Self) -> bool {
        // Representation-first comparison: lists in different representations
        // are treated as unequal without comparing their elements.
        match (self.as_inline(), other.as_inline()) {
            (Ok(lhs), Ok(rhs)) => lhs == rhs,
            // Deref through `ManuallyDrop` and `ColListVec` to compare as `[ColId]`.
            (Err(lhs), Err(rhs)) => ***lhs == ***rhs,
            // NOTE(review): a heap list can hold the same elements as an inline one
            // (e.g. `with_capacity(63)` followed by small sorted pushes), in which case
            // `eq` returns `false` while `cmp` below returns `Ordering::Equal`.
            // That makes `Ord` formally inconsistent with `PartialEq` — confirm intended.
            _ => false,
        }
    }
}

impl Ord for ColList {
    fn cmp(&self, other: &Self) -> Ordering {
        // Lexicographic comparison of the element sequences, regardless of representation.
        self.iter().cmp(other.iter())
    }
}
impl PartialOrd for ColList {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Hash for ColList {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hashes the raw representation (bitset word vs. element slice).
        // Consistent with `PartialEq` above: lists that compare equal
        // share a representation and thus hash identically.
        match self.as_inline() {
            Ok(inline) => inline.0.hash(state),
            Err(heap) => heap.hash(state),
        }
    }
}

impl fmt::Debug for ColList {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Renders as e.g. `[0, 2, 5]`, independent of representation.
        f.debug_list().entries(self.iter()).finish()
    }
}
345
346impl From<ColSet> for ColList {
347    fn from(value: ColSet) -> Self {
348        value.0
349    }
350}
351
/// A borrowed list of columns or a single one.
///
/// Avoids allocating a `ColList` when the caller already has
/// either a lone `ColId` or a borrowed `ColList` at hand.
pub enum ColOrCols<'a> {
    /// A single column.
    Col(ColId),
    /// A list of columns.
    ColList(&'a ColList),
}
359
360impl ColOrCols<'_> {
361    /// Returns `Some(col)` iff `self` is singleton.
362    pub fn as_singleton(&self) -> Option<ColId> {
363        match self {
364            Self::Col(col) => Some(*col),
365            Self::ColList(cols) => cols.as_singleton(),
366        }
367    }
368
369    /// Returns an iterator over all the columns in this list.
370    pub fn iter(&self) -> impl '_ + Iterator<Item = ColId> {
371        match self {
372            Self::Col(col) => Either::Left(iter::once(*col)),
373            Self::ColList(cols) => Either::Right(cols.iter()),
374        }
375    }
376
377    /// Returns the length of this list.
378    pub fn len(&self) -> u16 {
379        match self {
380            Self::Col(_) => 1,
381            Self::ColList(cols) => cols.len(),
382        }
383    }
384
385    /// Returns whether the list is empty.
386    pub fn is_empty(&self) -> bool {
387        self.len() == 0
388    }
389
390    /// Converts to a [`ColList`].
391    pub fn to_owned(self) -> ColList {
392        match self {
393            Self::Col(col) => [col].into(),
394            Self::ColList(list) => list.clone(),
395        }
396    }
397}
398
399impl PartialEq<ColList> for ColOrCols<'_> {
400    fn eq(&self, other: &ColList) -> bool {
401        self.iter().eq(other.iter())
402    }
403}
404impl PartialEq for ColOrCols<'_> {
405    fn eq(&self, other: &Self) -> bool {
406        self.iter().eq(other.iter())
407    }
408}
409
410impl Eq for ColOrCols<'_> {}
411impl Ord for ColOrCols<'_> {
412    fn cmp(&self, other: &Self) -> Ordering {
413        self.iter().cmp(other.iter())
414    }
415}
416impl PartialOrd for ColOrCols<'_> {
417    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
418        Some(self.cmp(other))
419    }
420}
421
/// A compressed set of columns. Like a `ColList`, but guaranteed to be sorted and to contain no duplicate entries.
/// Dereferences to a `ColList` for convenience.
///
/// The sorted/deduplicated invariant is established by `ColList::sort_dedup`
/// in the `From<ColList>` conversion and relied upon by `ColSet::contains`.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ColSet(ColList);
426
impl ColSet {
    /// Check if a `ColSet` contains a given column.
    ///
    /// Unlike `ColList::contains`, the heap case can use binary search
    /// because a `ColSet`'s backing list is always sorted and deduplicated.
    pub fn contains(&self, needle: ColId) -> bool {
        match self.as_inline() {
            Ok(inline) => inline.contains(needle),
            // We can use binary search because the vector is guaranteed to be sorted.
            Err(heap) => heap.binary_search(&needle).is_ok(),
        }
    }

    // Don't implement `insert` because repeated insertions will be O(n^2) if we want to keep the set sorted on the heap.
    // Use iterator methods to create a new `ColSet` instead.
}
440
441impl<C: Into<ColId>> FromIterator<C> for ColSet {
442    fn from_iter<T: IntoIterator<Item = C>>(iter: T) -> Self {
443        // TODO: implement a fast path here that avoids allocation, by lying about
444        // `preserves_set_order` to `push_inner`.
445        Self::from(iter.into_iter().collect::<ColList>())
446    }
447}
448
449impl From<ColList> for ColSet {
450    fn from(mut list: ColList) -> Self {
451        list.sort_dedup();
452        Self(list)
453    }
454}
455
456impl From<&ColList> for ColSet {
457    fn from(value: &ColList) -> Self {
458        value.iter().collect()
459    }
460}
461
462impl From<ColOrCols<'_>> for ColSet {
463    fn from(value: ColOrCols<'_>) -> Self {
464        match value {
465            ColOrCols::Col(col) => ColSet(col.into()),
466            ColOrCols::ColList(cols) => cols.into(),
467        }
468    }
469}
470
471impl<C: Into<ColId>> From<C> for ColSet {
472    fn from(value: C) -> Self {
473        Self::from(ColList::new(value.into()))
474    }
475}
476
impl Deref for ColSet {
    type Target = ColList;

    // A `ColSet` is a `ColList` with extra invariants (sorted, deduplicated),
    // so read-only list operations are exposed via deref.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl fmt::Debug for ColSet {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Renders with set braces, e.g. `{0, 2, 5}`, unlike `ColList`'s `[..]`.
        f.debug_set().entries(self.iter()).finish()
    }
}
490
/// The inline version of a [`ColList`].
///
/// The `u64` is a bitset shifted left by one:
/// bit 0 is always `1` (the marker distinguishing inline from heap),
/// and bit `c + 1` is set iff `ColId(c)` is in the list.
#[derive(Clone, Copy, PartialEq)]
struct ColListInline(u64);

impl ColListInline {
    /// Returns whether `needle` is part of this list.
    fn contains(&self, needle: ColId) -> bool {
        let col = needle.0;
        let inline = self.undo_mark();
        // Ids >= 63 can never be stored inline; the bound check
        // also keeps the shift below from overflowing.
        col < ColList::FIRST_HEAP_COL_U16 && inline & (1u64 << col) != 0
    }

    /// Returns an iterator over the [`ColId`]s stored by this list,
    /// in ascending id order.
    fn iter(&self) -> impl '_ + Clone + Iterator<Item = ColId> {
        let mut value = self.undo_mark();
        iter::from_fn(move || {
            if value == 0 {
                // No set bits; quit!
                None
            } else {
                // Count trailing zeros and then zero out the first set bit.
                // For e.g., `0b11001`, this would yield `[0, 3, 4]` as expected.
                let id = ColId(value.trailing_zeros() as u16);
                // `value & (value - 1)` clears the lowest set bit.
                value &= value.wrapping_sub(1);
                Some(id)
            }
        })
    }

    /// Returns the last element of the list.
    fn last(&self) -> Option<ColId> {
        // Index of the highest set bit; `checked_sub` turns
        // the all-zeros (empty) case into `None`.
        (u64::BITS - self.undo_mark().leading_zeros())
            .checked_sub(1)
            .map(|c| ColId(c as _))
    }

    /// Returns the length of the list.
    fn len(&self) -> u16 {
        // One element per set bit.
        self.undo_mark().count_ones() as u16
    }

    /// Undoes the shift in (1), discarding the inline marker bit,
    /// yielding the plain bitset where bit `c` represents `ColId(c)`.
    #[inline]
    fn undo_mark(&self) -> u64 {
        self.0 >> 1
    }

    /// Returns an equivalent list in heap form instead of inline, and adds `col` to it.
    /// The capacity of the vec will be `2 * (self.len() + 1)`,
    /// leaving head-room for further pushes.
    fn heapify_and_push(&self, col: ColId) -> ColListVec {
        let mut vec = ColListVec::with_capacity(2 * (self.len() + 1));
        for col in self.iter() {
            vec.push(col)
        }
        vec.push(col);
        vec
    }
}
549
/// The thin-vec heap based version of a [`ColList`].
///
/// Points at a single allocation laid out as `[len: u16, capacity: u16, data: [ColId]]`.
/// The pointer is 2-byte aligned, so its lowest bit is always `0`,
/// which is what lets `ColList::is_inline` discriminate the union.
struct ColListVec(NonNull<u16>);
552
553impl ColListVec {
554    /// Returns an empty vector with `capacity`.
555    fn with_capacity(capacity: u16) -> Self {
556        // Allocate the vector using the global allocator.
557        let layout = Self::layout(capacity);
558        // SAFETY: the size of `[u16; 2 + capacity]` is always non-zero.
559        let ptr = unsafe { alloc(layout) }.cast::<u16>();
560        let Some(ptr_non_null) = NonNull::new(ptr) else {
561            handle_alloc_error(layout)
562        };
563
564        let mut this = Self(ptr_non_null);
565        // SAFETY: `0 <= capacity` and claiming no elements are init trivially holds.
566        unsafe {
567            this.set_len(0);
568        }
569        // SAFETY: `capacity` matches that of the allocation.
570        unsafe { this.set_capacity(capacity) };
571        this
572    }
573
574    /// Returns the length of the list.
575    fn len(&self) -> u16 {
576        let ptr = self.0.as_ptr();
577        // SAFETY: `ptr` is properly aligned for `u16` and is valid for reads.
578        unsafe { *ptr }
579    }
580
581    /// SAFETY: `new_len <= self.capacity()` and `new_len` <= number of initialized elements.
582    unsafe fn set_len(&mut self, new_len: u16) {
583        let ptr = self.0.as_ptr();
584        // SAFETY:
585        // - `ptr` is valid for writes as we have exclusive access.
586        // - It's also properly aligned for `u16`.
587        unsafe {
588            *ptr = new_len;
589        }
590    }
591
592    /// Returns the capacity of the allocation in terms of elements.
593    fn capacity(&self) -> u16 {
594        let ptr = self.0.as_ptr();
595        // SAFETY: `ptr + 1 u16` is in bounds of the allocation and it doesn't overflow isize.
596        let capacity_ptr = unsafe { ptr.add(1) };
597        // SAFETY: `capacity_ptr` is properly aligned for `u16` and is valid for reads.
598        unsafe { *capacity_ptr }
599    }
600
601    /// Sets the capacity of the allocation in terms of elements.
602    ///
603    /// SAFETY: `cap` must match the actual capacity of the allocation.
604    unsafe fn set_capacity(&mut self, cap: u16) {
605        let ptr = self.0.as_ptr();
606        // SAFETY: `ptr + 1 u16` is in bounds of the allocation and it doesn't overflow isize.
607        let cap_ptr = unsafe { ptr.add(1) };
608        // SAFETY: `cap_ptr` is valid for writes as we have ownership of the allocation.
609        // It's also properly aligned for `u16`.
610        unsafe {
611            *cap_ptr = cap;
612        }
613    }
614
615    /// Push an element to the list.
616    fn push(&mut self, val: ColId) {
617        let len = self.len();
618        let cap = self.capacity();
619
620        if len == cap {
621            // We're at capacity, reallocate using standard * 2 exponential factor.
622            let new_cap = cap.checked_mul(2).expect("capacity overflow");
623            let new_layout = Self::layout(new_cap);
624            // Reallocation will will move the data as well.
625            let old_layout = Self::layout(cap);
626            let old_ptr = self.0.as_ptr().cast();
627            // SAFETY:
628            // - `base_ptr` came from the global allocator
629            // - `old_layout` is the same layout used for the original allocation.
630            // - `new_layout.size()` is non-zero and <= `isize::MAX`.
631            let new_ptr = unsafe { realloc(old_ptr, old_layout, new_layout.size()) }.cast();
632            let Some(ptr_non_null) = NonNull::new(new_ptr) else {
633                handle_alloc_error(new_layout);
634            };
635            // Use new pointer and set capacity.
636            self.0 = ptr_non_null;
637            // SAFETY: `new_cap` matches that of the allocation.
638            unsafe { self.set_capacity(new_cap) };
639        }
640
641        // Write the element and increase the length.
642        let base_ptr = self.0.as_ptr();
643        let elem_offset = 2 + len as usize;
644        // SAFETY: Allocated for `2 + capacity` `u16`s and `len <= capacity`, so we're in bounds.
645        let elem_ptr = unsafe { base_ptr.add(elem_offset) }.cast();
646        // SAFETY: `elem_ptr` is valid for writes and is properly aligned for `ColId`.
647        unsafe {
648            *elem_ptr = val;
649        }
650        // SAFETY: the length <= the capacity and we just init the `len + 1`th element.
651        unsafe {
652            self.set_len(len + 1);
653        }
654    }
655
656    /// Computes a layout for the following struct:
657    /// ```rust,ignore
658    /// struct ColListVecData {
659    ///     len: u16,
660    ///     capacity: u16,
661    ///     data: [ColId],
662    /// }
663    /// ```
664    ///
665    /// Panics if `cap` would result in an allocation larger than `isize::MAX`.
666    fn layout(cap: u16) -> Layout {
667        Layout::array::<u16>(cap.checked_add(2).expect("capacity overflow") as usize).unwrap()
668    }
669}
670
impl Deref for ColListVec {
    type Target = [ColId];

    /// Views the element data (past the two header `u16`s) as a slice.
    fn deref(&self) -> &Self::Target {
        let len = self.len() as usize;
        let ptr = self.0.as_ptr();
        // SAFETY: `ptr + 2` is always in bounds of the allocation and `ptr <= isize::MAX`.
        let ptr = unsafe { ptr.add(2) }.cast::<ColId>();
        // SAFETY:
        // - `ptr` is valid for reads for `len * size_of::<ColId>` and it is properly aligned.
        // - `len` elements are initialized.
        // - For the lifetime of `'0`, the memory won't be mutated.
        // - `len * size_of::<ColId> <= isize::MAX` holds.
        unsafe { from_raw_parts(ptr, len) }
    }
}

impl DerefMut for ColListVec {
    /// Views the element data (past the two header `u16`s) as a mutable slice.
    fn deref_mut(&mut self) -> &mut Self::Target {
        let len = self.len() as usize;
        let ptr = self.0.as_ptr();
        // SAFETY: `ptr + 2` is always in bounds of the allocation and `ptr <= isize::MAX`.
        let ptr = unsafe { ptr.add(2) }.cast::<ColId>();
        // SAFETY:
        // - `ptr` is valid for reads and writes for `len * size_of::<ColId>` and it is properly aligned.
        // - `len` elements are initialized.
        // - `len * size_of::<ColId> <= isize::MAX` holds.
        unsafe { from_raw_parts_mut(ptr, len) }
    }
}
701
impl Drop for ColListVec {
    fn drop(&mut self) {
        // Recompute the layout from the stored capacity;
        // it must match the layout of the live allocation.
        let capacity = self.capacity();
        let layout = Self::layout(capacity);
        let ptr = self.0.as_ptr().cast();
        // SAFETY: `ptr` was allocated by the global allocator
        // and `layout` was the one the memory was allocated with.
        unsafe { dealloc(ptr, layout) };
    }
}
712
713impl Clone for ColListVec {
714    fn clone(&self) -> Self {
715        let mut vec = ColListVec::with_capacity(self.len());
716        for col in self.iter().copied() {
717            vec.push(col);
718        }
719        vec
720    }
721}
722
723/// Check if a buffer is sorted and deduplicated.
724fn is_sorted_and_deduped(data: &[ColId]) -> bool {
725    match data {
726        [] => true,
727        [mut prev, rest @ ..] => !rest.iter().any(|elem| {
728            let bad = prev >= *elem;
729            prev = *elem;
730            bad
731        }),
732    }
733}
734
#[cfg(test)]
mod tests {
    use super::*;
    use proptest::collection::vec;
    use proptest::prelude::*;

    /// Linear-scan containment check used to cross-validate `ColList::contains`.
    fn contains(list: &ColList, x: &ColId) -> bool {
        list.iter().any(|y| y == *x)
    }

    proptest! {
        // Miri is slow, so run far fewer cases under it.
        #![proptest_config(ProptestConfig::with_cases(if cfg!(miri) { 8 } else { 2048 }))]

        // Ids < 63 keep the list inline exactly while pushes are strictly increasing.
        #[test]
        fn test_inline(cols in vec((0..ColList::FIRST_HEAP_COL_U16).prop_map_into(), 1..100)) {
            let [head, tail @ ..] = &*cols else { unreachable!() };

            let mut list = ColList::new(*head);
            let mut is_inline = list.is_inline();
            prop_assert!(is_inline);
            prop_assert!(!list.is_empty());
            prop_assert_eq!(list.len(), 1);
            prop_assert_eq!(list.head(), Some(*head));
            prop_assert_eq!(list.last(), Some(*head));
            prop_assert_eq!(list.iter().collect::<Vec<_>>(), [*head]);


            for col in tail {
                // Track the expected representation: a non-increasing push must spill to heap.
                is_inline &= list.last().unwrap() < *col;
                list.push(*col);

                prop_assert_eq!(is_inline, list.is_inline());
                prop_assert!(!list.is_empty());
                prop_assert_eq!(list.head(), Some(*head));
                prop_assert_eq!(list.last(), Some(*col));
                prop_assert_eq!(list.last(), list.iter().last());
                prop_assert!(contains(&list, col));
            }

            prop_assert_eq!(&list.clone(), &list);
            prop_assert_eq!(list.iter().collect::<Vec<_>>(), cols);
        }

        // Ids >= 63 always force the heap representation on the first such push.
        #[test]
        fn test_heap(cols in vec((ColList::FIRST_HEAP_COL_U16..).prop_map_into(), 1..100)) {
            let contains = |list: &ColList, x| list.iter().collect::<Vec<_>>().contains(x);

            let head = ColId(0);
            let mut list = ColList::new(head);
            prop_assert!(list.is_inline());
            prop_assert_eq!(list.len(), 1);

            for (idx, col) in cols.iter().enumerate() {
                list.push(*col);
                prop_assert!(!list.is_inline());
                prop_assert!(!list.is_empty());
                prop_assert_eq!(list.len() as usize, idx + 2);
                prop_assert_eq!(list.head(), Some(head));
                prop_assert_eq!(list.last(), Some(*col));
                prop_assert!(contains(&list, col));
            }

            prop_assert_eq!(&list.clone(), &list);

            // Iteration must reproduce the full push order, head included.
            let mut cols = cols;
            cols.insert(0, head);
            prop_assert_eq!(list.iter().collect::<Vec<_>>(), cols);
        }

        // Round-trip: collecting and re-collecting preserves the element sequence.
        #[test]
        fn test_collect(cols in vec((0..100).prop_map_into(), 0..100)) {
            let list = cols.iter().copied().collect::<ColList>();
            prop_assert!(list.iter().eq(cols));
            prop_assert_eq!(&list, &list.iter().collect::<ColList>());
        }

        // `as_singleton` is `Some` exactly for one-element lists, matching `head`.
        #[test]
        fn test_as_singleton(cols in vec((0..100).prop_map_into(), 0..10)) {
            let list = cols.iter().copied().collect::<ColList>();
            match cols.len() {
                1 => {
                    prop_assert_eq!(list.as_singleton(), Some(cols[0]));
                    prop_assert_eq!(list.as_singleton(), list.head());
                },
                _ => prop_assert_eq!(list.as_singleton(), None),
            }
        }

        // An unsorted/duplicated list of small ids must compress to an inline set.
        #[test]
        fn test_set_inlines(mut cols in vec((0..ColList::FIRST_HEAP_COL_U16).prop_map_into(), 1..100)) {
            prop_assume!(!is_sorted_and_deduped(&cols[..]));

            let list = ColList::from_iter(cols.iter().copied());
            prop_assert!(!list.is_inline());
            let set = ColSet::from(list);
            prop_assert!(set.is_inline());

            for col in cols.iter() {
                prop_assert!(set.contains(*col));
            }

            // The set iterates the sorted, deduplicated sequence.
            cols.sort();
            cols.dedup();
            prop_assert_eq!(set.iter().collect::<Vec<_>>(), cols);
        }

        // Large ids can never inline, but the set is still sorted and deduplicated.
        #[test]
        fn test_set_heap(mut cols in vec((ColList::FIRST_HEAP_COL_U16..).prop_map_into(), 1..100)) {
            prop_assume!(!is_sorted_and_deduped(&cols[..]));

            let list = ColList::from_iter(cols.iter().copied());
            prop_assert!(!list.is_inline());
            let set = ColSet::from(list);
            prop_assert!(!set.is_inline());

            for col in cols.iter() {
                prop_assert!(set.contains(*col));
            }

            // The set iterates the sorted, deduplicated sequence.
            cols.sort();
            cols.dedup();
            prop_assert_eq!(set.iter().collect::<Vec<_>>(), cols);
        }
    }
}
859}