// nearest/list.rs
1use core::{fmt, iter::FusedIterator, marker::PhantomData};
2
3use crate::{Flat, Patch, emitter::Pos};
4
/// A linked list of `T` values stored in a [`Region`](crate::Region).
///
/// # Layout
///
/// `#[repr(C)]` — 8 bytes, `align_of::<i32>()` alignment:
///
/// | Offset | Field  | Type  | Description                             |
/// |--------|--------|-------|-----------------------------------------|
/// | 0      | `head` | `i32` | Self-relative offset to first segment   |
/// | 4      | `len`  | `u32` | TOTAL element count across all segments |
///
/// When `len == 0`, `head` is `0` (no valid segment). When non-empty, `head` is
/// a self-relative offset from the `head` field itself to the first `Segment<T>`.
///
/// `NearList<T>` is **not** `Copy` or `Clone` — moving it would invalidate the
/// self-relative offset.
///
/// After [`trim`](crate::Region::trim), all elements live in a single contiguous
/// segment for cache-friendly iteration.
///
/// # Segment chain
///
/// A non-empty list consists of one or more `Segment<T>` nodes linked by
/// self-relative `next` pointers (`next == 0` terminates the chain). Each
/// segment stores a contiguous array of `T` values immediately after its
/// header.
///
/// Mutation operations ([`splice_list`], [`push_front`], [`extend_list`]) may
/// produce multi-segment lists. [`trim`](crate::Region::trim) compacts all
/// segments into one, restoring O(1) indexing and cache-friendly iteration.
///
/// # Soundness
///
/// **Provenance**: Segment walking uses `with_exposed_provenance` to
/// follow self-relative `next` pointers, recovering the allocation's
/// provenance exposed by `AlignedBuf::grow`. Same pattern as `Near<T>`.
///
/// **Invariant**: `len == 0` ⟺ `head == 0`. Enforced by all list-patching
/// methods which write both fields atomically.
///
/// [`splice_list`]: crate::Session::splice_list
/// [`push_front`]: crate::Session::push_front
/// [`extend_list`]: crate::Session::extend_list
#[repr(C)]
pub struct NearList<T> {
  /// Self-relative offset from this field to the first `Segment<T>`;
  /// `0` when the list is empty (see the `len == 0` ⟺ `head == 0` invariant).
  head: i32,
  /// Total element count summed across all segments in the chain.
  len: u32,
  /// Marks logical ownership of `T` values without storing one inline.
  _type: PhantomData<T>,
}
54
// SAFETY: NearList<T> contains only i32, u32, and PhantomData — no Drop, no heap.
// T: Flat bound enables self-sufficient deep_copy/validate that walk segments
// and handle elements. Coinductive trait resolution handles recursive types.
unsafe impl<T: Flat> Flat for NearList<T> {
  /// Copies this list into the destination patch: allocates one contiguous
  /// segment for all elements (regardless of the source's chain shape) and
  /// deep-copies each element into it, then writes the list header at `at`.
  unsafe fn deep_copy(&self, p: &mut impl Patch, at: Pos) {
    let len = self.len;
    if len == 0 {
      // Empty list: header with zero head offset (maintains len == 0 ⟺ head == 0).
      // SAFETY: Caller guarantees `at` was allocated for `NearList<T>`.
      unsafe { p.patch_list_header::<T>(at, Pos::ZERO, 0) };
      return;
    }
    // One segment big enough for every element — the copy is always compact.
    let seg_pos = p.alloc_segment::<T>(len);
    // Values start immediately after the segment header.
    let values_off = size_of::<Segment<T>>();
    for (i, elem) in self.iter().enumerate() {
      // SAFETY: seg_pos was just allocated with space for `len` elements.
      unsafe { elem.deep_copy(p, seg_pos.offset(values_off + i * size_of::<T>())) };
    }
    // SAFETY: Caller guarantees `at` was allocated for `NearList<T>`.
    unsafe { p.patch_list_header::<T>(at, seg_pos, len) };
  }

  /// Validates the list header at `addr` and its full segment chain.
  fn validate(addr: usize, buf: &[u8]) -> Result<(), crate::ValidateError> {
    crate::validate::validate_list_impl::<T>(addr, buf)
  }

  /// Validates an `Option<NearList<T>>` at `addr`: bounds-checks the slot,
  /// reads the discriminant byte, and for `Some` validates the payload list.
  fn validate_option(addr: usize, buf: &[u8]) -> Result<(), crate::ValidateError> {
    crate::ValidateError::check::<Option<Self>>(addr, buf)?;
    let disc = buf[addr];
    match disc {
      0 => Ok(()), // None
      1 => {
        // Payload lives at the Some(..) field offset within the Option layout.
        let inner = addr + core::mem::offset_of!(Option<Self>, Some.0);
        crate::validate::validate_list_impl::<T>(inner, buf)
      }
      _ => Err(crate::ValidateError::InvalidDiscriminant { addr, value: disc, max: 1 }),
    }
  }
}
93
/// A contiguous segment of values in a [`NearList`]. Stored in the Region buffer.
///
/// # Layout
///
/// `#[repr(C)]` — 8 bytes header + `len * size_of::<T>()` values:
///
/// | Offset                       | Field     | Type    | Description                          |
/// |------------------------------|-----------|---------|--------------------------------------|
/// | 0                            | `next`    | `i32`   | Self-relative offset to next segment |
/// | 4                            | `len`     | `u32`   | Number of values in this segment     |
/// | `size_of::<Segment<T>>()`    | values... | `[T]`   | Contiguous values                    |
///
/// `next == 0` indicates the end of the segment chain. Values are accessed by
/// pointer arithmetic from the segment address: the first value is at
/// `segment_addr + size_of::<Segment<T>>()`, subsequent values are contiguous
/// at `+ i * size_of::<T>()`.
#[repr(C)]
pub struct Segment<T> {
  /// Self-relative offset from this field to the next segment; `0` ends the chain.
  pub(crate) next: i32,
  /// Number of values stored in this segment (not the list total).
  pub(crate) len: u32,
  /// Zero-sized marker that gives the header `T`'s alignment and pins the
  /// value array's start without occupying space.
  pub(crate) _values: [T; 0],
}
116
impl<T: Flat> NearList<T> {
  /// Returns the number of elements.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, empty, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let region = Region::new(Root::make(list([1u32, 2, 3])));
  /// assert_eq!(region.items.len(), 3);
  ///
  /// let empty_region = Region::new(Root::make(empty()));
  /// assert_eq!(empty_region.items.len(), 0);
  /// ```
  #[must_use]
  pub const fn len(&self) -> usize {
    self.len as usize
  }

  /// Returns `true` if the list has no elements.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, empty, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let region = Region::new(Root::make(empty()));
  /// assert!(region.items.is_empty());
  ///
  /// let region = Region::new(Root::make(list([1u32])));
  /// assert!(!region.items.is_empty());
  /// ```
  #[must_use]
  pub const fn is_empty(&self) -> bool {
    self.len == 0
  }

  /// Returns the raw head offset (for internal Session use).
  pub(crate) const fn head_offset(&self) -> i32 {
    self.head
  }

  /// Returns the number of segments in the chain.
  ///
  /// After [`trim`](crate::Region::trim), this is always 0 (empty) or 1.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let mut region = Region::new(Root::make(list([1u32, 2, 3])));
  /// assert_eq!(region.items.segment_count(), 1);
  ///
  /// // push_front adds a new segment.
  /// region.session(|s| {
  ///   let items = s.nav(s.root(), |r| &r.items);
  ///   s.push_front(items, 0u32);
  /// });
  /// assert_eq!(region.items.segment_count(), 2);
  ///
  /// // trim consolidates into one segment.
  /// region.trim();
  /// assert_eq!(region.items.segment_count(), 1);
  /// ```
  #[must_use]
  pub fn segment_count(&self) -> usize {
    if self.len == 0 {
      return 0;
    }
    let mut count = 1usize;
    // Resolve the self-relative head: offset is measured from the `head`
    // field's own address, not from the struct's base.
    let mut seg_addr =
      core::ptr::from_ref(&self.head).cast::<u8>().addr().wrapping_add_signed(self.head as isize);
    loop {
      // SAFETY: seg_addr points to a valid Segment<T> in the region buffer.
      let seg = unsafe { &*core::ptr::with_exposed_provenance::<Segment<T>>(seg_addr) };
      if seg.next == 0 {
        break;
      }
      // `next` is likewise self-relative to the `next` field itself.
      let mut seg_addr_next =
        core::ptr::from_ref(&seg.next).cast::<u8>().addr().wrapping_add_signed(seg.next as isize);
      core::mem::swap(&mut seg_addr, &mut seg_addr_next);
      count += 1;
    }
    count
  }

  /// Returns a reference to the last element, or `None` if empty.
  ///
  /// **O(n)**: requires full traversal of the segment chain to find the
  /// last segment, then indexes its final element.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, empty, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let region = Region::new(Root::make(list([10u32, 20, 30])));
  /// assert_eq!(region.items.last(), Some(&30));
  ///
  /// let region = Region::new(Root::make(empty()));
  /// assert_eq!(region.items.last(), None);
  /// ```
  #[must_use]
  pub fn last(&self) -> Option<&T> {
    if self.len == 0 {
      return None;
    }
    // Walk the segment chain to the last segment.
    let mut seg_addr =
      core::ptr::from_ref(&self.head).cast::<u8>().addr().wrapping_add_signed(self.head as isize);
    loop {
      // SAFETY: seg_addr points to a valid Segment<T> in the region buffer.
      let seg = unsafe { &*core::ptr::with_exposed_provenance::<Segment<T>>(seg_addr) };
      if seg.next == 0 {
        // This is the last segment. Return its final element.
        // NOTE(review): assumes the final segment is never empty when
        // len > 0 — `seg.len as usize - 1` would wrap otherwise; relies on
        // the emitter/Session never leaving an empty tail segment.
        let last_idx = seg.len as usize - 1;
        let val_addr =
          seg_addr.wrapping_add(size_of::<Segment<T>>()).wrapping_add(last_idx * size_of::<T>());
        // SAFETY: val_addr points to a valid T within the last segment.
        return Some(unsafe { &*core::ptr::with_exposed_provenance::<T>(val_addr) });
      }
      seg_addr =
        core::ptr::from_ref(&seg.next).cast::<u8>().addr().wrapping_add_signed(seg.next as isize);
    }
  }

  /// Returns `true` if the list contains an element equal to the given value.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, empty, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let region = Region::new(Root::make(list([1u32, 2, 3])));
  /// assert!(region.items.contains(&2));
  /// assert!(!region.items.contains(&4));
  ///
  /// let region = Region::new(Root::make(empty()));
  /// assert!(!region.items.contains(&1));
  /// ```
  #[must_use]
  pub fn contains(&self, item: &T) -> bool
  where
    T: PartialEq,
  {
    self.iter().any(|x| x == item)
  }

  /// Returns a reference to the first element, or `None` if empty.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, empty, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let region = Region::new(Root::make(list([10u32, 20])));
  /// assert_eq!(region.items.first(), Some(&10));
  ///
  /// let region = Region::new(Root::make(empty()));
  /// assert_eq!(region.items.first(), None);
  /// ```
  #[must_use]
  pub fn first(&self) -> Option<&T> {
    if self.len == 0 {
      return None;
    }
    // SAFETY: head offset was written by the emitter, pointing to a valid Segment<T>.
    // The first value is at seg_addr + size_of::<Segment<T>>().
    unsafe {
      let addr =
        core::ptr::from_ref(&self.head).cast::<u8>().addr().wrapping_add_signed(self.head as isize);
      let val_ptr =
        core::ptr::with_exposed_provenance::<T>(addr.wrapping_add(size_of::<Segment<T>>()));
      Some(&*val_ptr)
    }
  }

  /// Returns an iterator over the elements.
  ///
  /// Construction is O(1). Within a segment, iteration is contiguous (`ptr.add(1)`).
  /// At segment boundaries, follows `next` pointers to the next segment.
  /// After [`trim`](crate::Region::trim), iteration is pure array traversal.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let region = Region::new(Root::make(list([10u32, 20, 30])));
  /// let sum: u32 = region.items.iter().sum();
  /// assert_eq!(sum, 60);
  ///
  /// // Also works with for-in (via IntoIterator for &NearList<T>).
  /// let mut count = 0;
  /// for item in &region.items {
  ///   count += 1;
  ///   assert!(*item >= 10);
  /// }
  /// assert_eq!(count, 3);
  /// ```
  #[must_use]
  pub fn iter(&self) -> NearListIter<'_, T> {
    if self.len == 0 {
      // Empty iterator: null pointers are never dereferenced because
      // `remaining_total == 0` short-circuits `next()`.
      return NearListIter {
        current_value: core::ptr::null(),
        remaining_in_seg: 0,
        current_seg: core::ptr::null(),
        remaining_total: 0,
        _type: PhantomData,
      };
    }
    // SAFETY: head offset was written by the emitter, pointing to a valid Segment<T>.
    let seg_addr =
      core::ptr::from_ref(&self.head).cast::<u8>().addr().wrapping_add_signed(self.head as isize);
    let seg_ptr = core::ptr::with_exposed_provenance::<Segment<T>>(seg_addr);
    // SAFETY: `seg_ptr` points to a valid `Segment<T>` in the region buffer,
    // reached via the head offset written by the emitter.
    let seg = unsafe { &*seg_ptr };
    let val_ptr =
      core::ptr::with_exposed_provenance::<T>(seg_addr.wrapping_add(size_of::<Segment<T>>()));
    NearListIter {
      current_value: val_ptr,
      remaining_in_seg: seg.len,
      current_seg: seg_ptr,
      remaining_total: self.len,
      _type: PhantomData,
    }
  }
}
367
/// Iterator over the elements of a [`NearList`].
///
/// Within a segment, values are contiguous (pointer increment). At segment
/// boundaries, follows the `next` pointer to the next segment.
///
/// Implements [`ExactSizeIterator`] and [`FusedIterator`]. Does **not**
/// implement [`DoubleEndedIterator`] because `NearList` uses a singly-linked
/// segment chain — reverse traversal would require hidden allocation or
/// O(n) per `next_back()` call, which conflicts with the library's
/// predictable-performance, no-hidden-allocation philosophy. Use
/// [`NearList::last`] for O(segments) access to the final element.
pub struct NearListIter<'a, T: Flat> {
  /// Pointer to the next value to yield; null only for the empty iterator.
  current_value: *const T,
  /// Values left in the current segment before `next` must be followed.
  remaining_in_seg: u32,
  /// Current segment header; used to follow `next` at segment boundaries.
  current_seg: *const Segment<T>,
  /// Values left across the whole list; `0` means exhausted.
  remaining_total: u32,
  /// Ties the yielded `&'a T` references to the borrowed list's lifetime.
  _type: PhantomData<&'a T>,
}
386
impl<'a, T: Flat> Iterator for NearListIter<'a, T> {
  type Item = &'a T;

  /// Yields the next element, hopping to the next segment when the current
  /// one is exhausted. Null pointers (empty iterator) are never touched
  /// because `remaining_total == 0` returns early.
  fn next(&mut self) -> Option<&'a T> {
    if self.remaining_total == 0 {
      return None;
    }

    // If current segment is exhausted, advance to the next segment.
    if self.remaining_in_seg == 0 {
      // SAFETY: remaining_total > 0 guarantees a next segment exists.
      unsafe {
        let seg = &*self.current_seg;
        // `next` is self-relative to the `next` field's own address.
        let next_addr =
          core::ptr::from_ref(&seg.next).cast::<u8>().addr().wrapping_add_signed(seg.next as isize);
        self.current_seg = core::ptr::with_exposed_provenance::<Segment<T>>(next_addr);
        let new_seg = &*self.current_seg;
        self.remaining_in_seg = new_seg.len;
        self.current_value =
          core::ptr::with_exposed_provenance::<T>(next_addr.wrapping_add(size_of::<Segment<T>>()));
      }
    }

    // SAFETY: current_value points to a valid T within the current segment.
    unsafe {
      let val = &*self.current_value;
      self.remaining_total -= 1;
      self.remaining_in_seg -= 1;
      // Advance to the next value in the segment (contiguous).
      // For ZSTs, add(1) is a no-op on the address, which is correct.
      self.current_value = self.current_value.add(1);
      Some(val)
    }
  }

  /// Exact bounds: `remaining_total` tracks the precise element count.
  fn size_hint(&self) -> (usize, Option<usize>) {
    let r = self.remaining_total as usize;
    (r, Some(r))
  }
}
427
/// `size_hint` returns exact bounds (`remaining_total`), so `len()` is precise.
impl<T: Flat> ExactSizeIterator for NearListIter<'_, T> {}

/// `NearListIter` always returns `None` once exhausted — it never resumes
/// yielding elements after `remaining_total` reaches zero.
impl<T: Flat> FusedIterator for NearListIter<'_, T> {}
433
/// Enables `for item in &list { … }` by delegating to [`NearList::iter`].
impl<'a, T: Flat> IntoIterator for &'a NearList<T> {
  type Item = &'a T;
  type IntoIter = NearListIter<'a, T>;

  fn into_iter(self) -> Self::IntoIter {
    self.iter()
  }
}
442
443impl<T: Flat + PartialEq> PartialEq for NearList<T> {
444  fn eq(&self, other: &Self) -> bool {
445    self.len() == other.len() && self.iter().zip(other.iter()).all(|(a, b)| a == b)
446  }
447}
448
/// `Eq` holds because equality is element-wise and `T: Eq` is a total equivalence.
impl<T: Flat + Eq> Eq for NearList<T> {}
450
451impl<T: Flat + fmt::Debug> fmt::Debug for NearList<T> {
452  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
453    f.debug_list().entries(self.iter()).finish()
454  }
455}
456
457impl<T: Flat + fmt::Display> fmt::Display for NearList<T> {
458  fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
459    f.write_str("[")?;
460    for (i, item) in self.iter().enumerate() {
461      if i > 0 {
462        f.write_str(", ")?;
463      }
464      fmt::Display::fmt(item, f)?;
465    }
466    f.write_str("]")
467  }
468}
469
impl<T: Flat> NearList<T> {
  /// Returns a reference to the element at `index`, or `None` if out of bounds.
  ///
  /// **O(1)** when all elements are in a single segment (always true after
  /// [`trim`](crate::Region::trim)). Falls back to O(n) segment walk otherwise.
  ///
  /// # Examples
  ///
  /// ```
  /// use nearest::{Flat, NearList, Region, list};
  ///
  /// #[derive(Flat)]
  /// struct Root { items: NearList<u32> }
  ///
  /// let region = Region::new(Root::make(list([10u32, 20, 30])));
  /// assert_eq!(region.items.get(0), Some(&10));
  /// assert_eq!(region.items.get(2), Some(&30));
  /// assert_eq!(region.items.get(3), None);
  /// ```
  #[must_use]
  pub fn get(&self, index: usize) -> Option<&T> {
    if index >= self.len as usize {
      return None;
    }
    // SAFETY: head offset was written by the emitter, pointing to a valid Segment<T>.
    unsafe {
      // Resolve the self-relative head offset (measured from the `head` field).
      let seg_addr =
        core::ptr::from_ref(&self.head).cast::<u8>().addr().wrapping_add_signed(self.head as isize);
      let seg = &*core::ptr::with_exposed_provenance::<Segment<T>>(seg_addr);
      // Fast path: all elements in first segment (always true after trim).
      if (seg.len as usize) > index {
        let val_addr =
          seg_addr.wrapping_add(size_of::<Segment<T>>()).wrapping_add(index * size_of::<T>());
        return Some(&*core::ptr::with_exposed_provenance::<T>(val_addr));
      }
    }
    // Slow path: multi-segment walk. Index is in bounds (checked above),
    // so `nth` always finds an element.
    self.iter().nth(index)
  }
}
510
511impl<T: Flat> core::ops::Index<usize> for NearList<T> {
512  type Output = T;
513
514  /// Access the element at `index`.
515  ///
516  /// **O(1)** when all elements are in a single segment (always true after
517  /// [`trim`](crate::Region::trim)). Falls back to O(n) segment walk otherwise.
518  ///
519  /// # Panics
520  ///
521  /// Panics if `index >= self.len()`.
522  fn index(&self, index: usize) -> &T {
523    self.get(index).expect("NearList index out of bounds")
524  }
525}