sliding_ring/
lib.rs

1//! Fixed-size sliding ring buffer with anchor-preserving index math.
2//!
3//! `SlidingRing` keeps a circular array of 256 slots whose live window begins at an
4//! anchor index. Moves are expressed as signed deltas; the ring rewires indices in
5//! constant time and zeroes only the slots that newly enter the window (or the entire
6//! ring when the delta exceeds the configured depth). Any domain-specific “anchor
7//! coordinate” should be stored outside the ring and combined with offsets as needed.
8//!
9//! ## Model overview
10//! - The ring tracks an anchor **index** (0-255). Store your absolute coordinate outside the ring.
11//! - Moving by `k` adjusts the anchor index by `k mod 256` and clears exactly `|k|`
12//!   freshly entered offsets (full reset when `|k| >= DEPTH`).
13//! - [`SlidingRing::slot`] / [`SlidingRing::slot_mut`] accept offsets relative to the anchor, keeping lookups branch-free.
14//! - [`SlidingRing::shift_by_keep_anchor`] preserves the newly entered anchor slot on backward moves
15//!   so consumers can promote “next best” levels without losing their quantity.
16//!
17//! ## When to use it
18//! Use this crate any time you need a cache-friendly sliding window tied to a
19//! monotonically moving reference point and you can tolerate a fixed resolution
20//! of 256 discrete offsets. Typical use-cases include:
21//! - Aggregated orderbooks keyed by price ticks.
22//! - Sliding time buckets for telemetry or rate-limiters.
23//! - Game state timelines (e.g., ringed entity positions) where only a bounded
24//!   horizon surrounding the current tick matters.
25//!
26//! ## When *not* to use it
27//! - You need more than 256 distinct offsets at a time.
28//! - The anchor jumps arbitrarily with sparse occupancy (use a hashmap instead).
29//! - Slots require heap allocations on clear (prefer a structure that owns the
30//!   heap nodes elsewhere and references them by ID).
31//!
32//! ## Slot requirements
33//! The buffer stores `[T; 256]` where `T: Copy`. Pass the value that represents
34//! your cleared state (often zero) to [`SlidingRing::new`]; the ring seeds every
35//! slot with that value and reuses it whenever newly entered slots need to be
36//! cleared. This keeps the API simple and removes the need for a custom trait.
37//! For ultra-low latency users, the main primitive is
38//! [`SlidingRing::slices_from_anchor`], which returns the two contiguous spans
39//! that cover the ring starting at the anchor. Operate on those slices directly
40//! with SIMD or unrolled loops when you need raw throughput. The iterator
41//! variants (`iter*`) simply layer offset reporting on top of the
42//! same index arithmetic and remain zero-cost.
43//! Pair those slices with a stable SIMD helper (e.g., the [`wide`](https://crates.io/crates/wide)
44//! crate demonstrated in `examples/simd_scan.rs`) to keep portable performance on stable Rust.
45//!
46//! ## Example
47//!
48//! ```rust
49//! use sliding_ring::SlidingRing;
50//!
51//! #[derive(Debug, Copy, Clone)]
52//! struct Level { qty: u64 }
53//!
54//! const DEPTH: u8 = 64;
55//! let mut ring = SlidingRing::<Level, DEPTH>::new(Level { qty: 0 });
56//! let mut best = 1_000i128;
57//! let offset = 5u8;
58//! ring.slot_mut(offset).qty = 10;
59//! ring.shift_by(offset as i128);
60//! best += offset as i128;
61//! assert_eq!(ring.borrow_anchor().qty, 10);
62//! ```
63//!
64//! ### Telemetry counter example
65//!
66//! ```rust
67//! # use sliding_ring::{SlidingRing, Direction};
68//! #[derive(Clone, Copy)]
69//! struct Bucket { hits: u64 }
70//! const DEPTH: u8 = 32;
71//! let mut ring = SlidingRing::<Bucket, DEPTH>::new(Bucket { hits: 0 });
72//! ring.slot_mut(5).hits += 1;
73//! ring.slot_mut(6).hits += 3;
74//! let sum: u64 = ring
75//!     .iter_from_anchor(Direction::Forward)
76//!     .map(|(_, bucket)| bucket.hits)
77//!     .sum();
78//! assert_eq!(sum, 4);
79//! ```
80
81use core::{fmt, marker::PhantomData};
82
/// Number of slots in the ring.
pub const RING_SIZE: usize = 256;
/// Raw slot index; a `u8` wraps at 256 for free, matching [`RING_SIZE`].
pub type RingIndex = u8;
86
/// Ring that slides over signed coordinates while keeping slot access O(1).
///
/// Stores a fixed `[T; RING_SIZE]` array plus the anchor index of the live
/// window; all bookkeeping is `u8` index arithmetic, so moves only overwrite
/// cleared slots and never reallocate or shuffle live data.
pub struct SlidingRing<T: Copy, const DEPTH: u8> {
    // Circular backing storage; the live window is the DEPTH slots starting
    // at `anchor` (indices wrap mod RING_SIZE).
    slots: [T; RING_SIZE],
    // Caller-supplied "cleared" value, written into slots entering the window.
    zero: T,
    // Raw index (0-255) of the window's first slot.
    anchor: RingIndex,
}
93
impl<T: Copy, const DEPTH: u8> SlidingRing<T, DEPTH> {
    /// Create a new ring seeded with `zero`. The anchor index starts at 0.
    ///
    /// `zero` is remembered and reused as the "cleared" value whenever a shift
    /// brings fresh offsets into the window.
    ///
    /// # Panics
    /// Panics if `DEPTH == 0`. The second assert (`DEPTH <= RING_SIZE`) is
    /// trivially satisfied for a `u8` depth and only guards a future widening
    /// of the depth type.
    pub fn new(zero: T) -> Self {
        assert!(DEPTH > 0, "SlidingRing DEPTH must be at least 1");
        assert!(
            DEPTH as usize <= RING_SIZE,
            "SlidingRing DEPTH cannot exceed {}",
            RING_SIZE
        );
        Self {
            slots: [zero; RING_SIZE],
            zero,
            anchor: 0,
        }
    }

    /// Compile-time depth parameter exposed for downstream consumers.
    pub const DEPTH: u8 = DEPTH;

    /// `DEPTH` widened to `usize` for index arithmetic.
    #[inline(always)]
    const fn depth_usize() -> usize {
        DEPTH as usize
    }

    /// Core shift shared by [`SlidingRing::shift_by`] and
    /// [`SlidingRing::shift_by_keep_anchor`].
    ///
    /// Rewires the anchor index by `delta mod 256` and clears exactly the
    /// slots that newly enter the live window. When `|delta| >= DEPTH` no old
    /// slot can survive, so the whole ring is reset instead.
    ///
    /// `keep_new_anchor` skips clearing offset 0 on backward moves so callers
    /// can promote data pre-staged just below the window.
    #[inline(always)]
    fn shift_internal(&mut self, delta: i128, keep_new_anchor: bool) {
        if delta == 0 {
            return;
        }
        // All index math is mod 256, so only the low byte of `delta` matters.
        let step = diff_mod_256(delta);
        self.anchor = self.anchor.wrapping_add(step);
        let depth = Self::depth_usize();
        let depth_limit = DEPTH as i128;
        if delta >= depth_limit || delta <= -depth_limit {
            // Old and new windows are disjoint: reset every slot.
            self.slots.fill(self.zero);
            return;
        }
        // |delta| < DEPTH <= 255 here, so the negation cannot overflow.
        let count = if delta > 0 {
            delta as usize
        } else {
            (-delta) as usize
        };
        debug_assert!(count < depth, "delta {} exceeds window {}", delta, DEPTH);
        if delta > 0 {
            // Forward move: offsets [depth - count, depth) of the new window
            // are freshly entered and must be cleared.
            let keep = depth - count;
            let mut idx = self.anchor.wrapping_add(keep as u8);
            for _ in 0..count {
                self.slots[idx as usize] = self.zero;
                idx = idx.wrapping_add(1);
            }
        } else {
            // Backward move: offsets [0, count) of the new window are fresh.
            let mut idx = self.anchor;
            for i in 0..count {
                if keep_new_anchor && i == 0 {
                    // Preserve whatever is stored at the new anchor (offset 0).
                    idx = idx.wrapping_add(1);
                    continue;
                }
                self.slots[idx as usize] = self.zero;
                idx = idx.wrapping_add(1);
            }
        }
    }

    /// Borrow the slot at the anchor.
    #[inline(always)]
    pub fn borrow_anchor(&self) -> &T {
        &self.slots[self.anchor as usize]
    }

    /// Borrow the slot at the anchor mutably.
    #[inline(always)]
    pub fn borrow_anchor_mut(&mut self) -> &mut T {
        &mut self.slots[self.anchor as usize]
    }

    /// Immutable access to the slot for `offset` (measured from the anchor).
    ///
    /// Debug builds assert `offset < DEPTH`; release builds skip the check, so
    /// an out-of-window offset silently reads whatever raw slot it lands on.
    #[inline(always)]
    pub fn slot(&self, offset: RingIndex) -> &T {
        debug_assert!(
            (offset as usize) < Self::depth_usize(),
            "offset {} is outside the configured depth ({})",
            offset,
            DEPTH
        );
        let idx = self.anchor.wrapping_add(offset);
        &self.slots[idx as usize]
    }

    /// Mutable access to the slot for `offset` (measured from the anchor).
    ///
    /// Same checking policy as [`SlidingRing::slot`]: debug-assert only.
    #[inline(always)]
    pub fn slot_mut(&mut self, offset: RingIndex) -> &mut T {
        debug_assert!(
            (offset as usize) < Self::depth_usize(),
            "offset {} is outside the configured depth ({})",
            offset,
            DEPTH
        );
        let idx = self.anchor.wrapping_add(offset);
        &mut self.slots[idx as usize]
    }

    /// Shift the anchor by `delta`, clearing only the slots that newly enter the window.
    #[inline(always)]
    pub fn shift_by(&mut self, delta: i128) {
        self.shift_internal(delta, false);
    }

    /// Shift by `delta` while preserving the new anchor slot on backward moves.
    ///
    /// Note: a move with `|delta| >= DEPTH` still resets the entire ring,
    /// anchor slot included, because nothing from the old window survives.
    #[inline(always)]
    pub fn shift_by_keep_anchor(&mut self, delta: i128) {
        self.shift_internal(delta, true);
    }

    /// Immutable access by raw ring index (0-255), bypassing the anchor.
    #[inline(always)]
    pub fn slot_by_index(&self, index: RingIndex) -> &T {
        &self.slots[index as usize]
    }

    /// Mutable access by raw ring index (0-255), bypassing the anchor.
    #[inline(always)]
    pub fn slot_by_index_mut(&mut self, index: RingIndex) -> &mut T {
        &mut self.slots[index as usize]
    }

    /// Clear every slot and reset the anchor index to zero.
    #[inline(always)]
    pub fn clear_all(&mut self) {
        self.slots.fill(self.zero);
        self.anchor = 0;
    }

    /// Expose immutable slice reference for advanced use-cases.
    #[inline(always)]
    pub fn as_slice(&self) -> &[T; RING_SIZE] {
        &self.slots
    }

    /// Expose mutable slice reference for advanced use-cases.
    #[inline(always)]
    pub fn as_mut_slice(&mut self) -> &mut [T; RING_SIZE] {
        &mut self.slots
    }

    /// Returns the contiguous slices that cover the ring starting at the anchor.
    ///
    /// The first slice begins at the anchor slot; the second slice wraps around
    /// to index 0 (and may be empty). This is ideal for SIMD scans.
    ///
    /// ```
    /// # use sliding_ring::SlidingRing;
    /// const DEPTH: u8 = 16;
    /// let mut ring = SlidingRing::<u64, DEPTH>::new(0);
    /// *ring.slot_mut(0) = 42;
    /// let (tail, head) = ring.slices_from_anchor();
    /// assert_eq!(tail.first(), Some(&42));
    /// assert!(head.is_empty());
    /// ```
    #[inline(always)]
    pub fn slices_from_anchor(&self) -> (&[T], &[T]) {
        let start = self.anchor as usize;
        let depth = Self::depth_usize();
        if depth >= RING_SIZE {
            // Unreachable while DEPTH is a u8 (max 255 < RING_SIZE = 256);
            // kept as a defensive fast path if the depth type ever widens.
            let (head, tail) = self.slots.split_at(start);
            return (tail, head);
        }
        // The window may wrap: `tail` runs from the anchor to the array end,
        // `head` covers the remainder starting at raw index 0.
        let tail_len = depth.min(RING_SIZE - start);
        let head_len = depth - tail_len;
        let tail = &self.slots[start..start + tail_len];
        let head = &self.slots[..head_len];
        (tail, head)
    }

    /// Mutable variant of [`SlidingRing::slices_from_anchor`].
    ///
    /// ```
    /// # use sliding_ring::SlidingRing;
    /// const DEPTH: u8 = 16;
    /// let mut ring = SlidingRing::<u64, DEPTH>::new(0);
    /// {
    ///     let (tail, head) = ring.slices_from_anchor_mut();
    ///     tail[0] = 7;
    ///     assert!(head.is_empty());
    /// }
    /// assert_eq!(*ring.slot(0), 7);
    /// ```
    #[inline(always)]
    pub fn slices_from_anchor_mut(&mut self) -> (&mut [T], &mut [T]) {
        let start = self.anchor as usize;
        let depth = Self::depth_usize();
        if depth >= RING_SIZE {
            // Unreachable while DEPTH is a u8; see `slices_from_anchor`.
            let (head, tail) = self.slots.split_at_mut(start);
            return (tail, head);
        }
        let tail_len = depth.min(RING_SIZE - start);
        let head_len = depth - tail_len;
        // Split at the anchor, then trim each half to the window lengths.
        let (head, tail) = self.slots.split_at_mut(start);
        let (tail_window, _) = tail.split_at_mut(tail_len);
        let head_window = &mut head[..head_len];
        (tail_window, head_window)
    }

    /// Iterate over every slot in storage order (raw index 0..RING_SIZE),
    /// ignoring the anchor.
    #[inline(always)]
    pub fn iter(&self) -> Slots<'_, T> {
        Slots::new(&self.slots)
    }

    /// Mutable iterator over every slot in storage order, ignoring the anchor.
    #[inline(always)]
    pub fn iter_mut(&mut self) -> SlotsMut<'_, T> {
        SlotsMut::new(&mut self.slots)
    }

    /// Iterate starting at the anchor in the desired direction (wrapping once).
    ///
    /// ```
    /// # use sliding_ring::{Direction, SlidingRing};
    /// const DEPTH: u8 = 16;
    /// let mut ring = SlidingRing::<u64, DEPTH>::new(0);
    /// *ring.slot_mut(0) = 1;
    /// *ring.slot_mut(1) = 2;
    /// let mut iter = ring.iter_from_anchor(Direction::Forward);
    /// assert_eq!(iter.next().unwrap().1, &1);
    /// assert_eq!(iter.next().unwrap().0, 1);
    /// ```
    #[inline(always)]
    pub fn iter_from_anchor(&self, direction: Direction) -> AnchorIter<'_, T, DEPTH> {
        AnchorIter::new(self, direction)
    }

    /// Mutable iteration starting at the anchor in the desired direction.
    ///
    /// ```
    /// # use sliding_ring::{Direction, SlidingRing};
    /// const DEPTH: u8 = 16;
    /// let mut ring = SlidingRing::<u64, DEPTH>::new(0);
    /// for (_, slot) in ring.iter_from_anchor_mut(Direction::Forward).take(2) {
    ///     *slot = 5;
    /// }
    /// assert_eq!(*ring.slot(0), 5);
    /// assert_eq!(*ring.slot(1), 5);
    /// ```
    #[inline(always)]
    pub fn iter_from_anchor_mut(&mut self, direction: Direction) -> AnchorIterMut<'_, T, DEPTH> {
        AnchorIterMut::new(self, direction)
    }
}
342
343impl<T: Copy, const DEPTH: u8> fmt::Debug for SlidingRing<T, DEPTH> {
344    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
345        f.debug_struct("SlidingRing")
346            .field("anchor_index", &self.anchor)
347            .finish()
348    }
349}
350
/// Iteration order relative to the anchor.
///
/// The anchor iterators report offsets 0, 1, 2, … when walking forward and
/// 0, -1, -2, … when walking backward.
#[derive(Copy, Clone, Debug)]
pub enum Direction {
    /// Increasing offsets from the anchor (useful for asks / forward scans).
    Forward,
    /// Decreasing offsets from the anchor (useful for bids / backward scans).
    Backward,
}
359
/// Iterator over ring slots in storage order.
///
/// Wraps [`core::slice::Iter`], which performs the same begin/end pointer
/// traversal for sized `T` but — unlike the previous hand-rolled pointer
/// pair — is also correct for zero-sized `T`: with a ZST, `ptr.add(n)` does
/// not move the pointer (so `begin == end` would terminate immediately) and
/// `offset_from` between ZST pointers is undefined behavior.
pub struct Slots<'a, T> {
    inner: core::slice::Iter<'a, T>,
}

impl<'a, T> Slots<'a, T> {
    /// Build an iterator over `slice`.
    ///
    /// Generalized from `&[T; RING_SIZE]` to any slice; array callers coerce
    /// at the call site, so existing uses are unchanged.
    #[inline(always)]
    fn new(slice: &'a [T]) -> Self {
        Self {
            inner: slice.iter(),
        }
    }
}

impl<'a, T> Iterator for Slots<'a, T> {
    type Item = &'a T;

    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a, T> ExactSizeIterator for Slots<'a, T> {
    #[inline(always)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<'a, T> core::iter::FusedIterator for Slots<'a, T> {}
411
/// Mutable iterator over ring slots in storage order.
///
/// Wraps [`core::slice::IterMut`], which performs the same begin/end pointer
/// traversal for sized `T` but — unlike the previous hand-rolled pointer
/// pair — is also correct for zero-sized `T`: with a ZST, `ptr.add(n)` does
/// not move the pointer (so `begin == end` would terminate immediately) and
/// `offset_from` between ZST pointers is undefined behavior.
pub struct SlotsMut<'a, T> {
    inner: core::slice::IterMut<'a, T>,
}

impl<'a, T> SlotsMut<'a, T> {
    /// Build a mutable iterator over `slice`.
    ///
    /// Generalized from `&mut [T; RING_SIZE]` to any mutable slice; array
    /// callers coerce at the call site, so existing uses are unchanged.
    #[inline(always)]
    fn new(slice: &'a mut [T]) -> Self {
        Self {
            inner: slice.iter_mut(),
        }
    }
}

impl<'a, T> Iterator for SlotsMut<'a, T> {
    type Item = &'a mut T;

    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a, T> ExactSizeIterator for SlotsMut<'a, T> {
    #[inline(always)]
    fn len(&self) -> usize {
        self.inner.len()
    }
}

impl<'a, T> core::iter::FusedIterator for SlotsMut<'a, T> {}
463
464/// Anchor-ordered iterator (immutable).
465pub struct AnchorIter<'a, T: Copy, const DEPTH: u8> {
466    ptr: *const T,
467    base: *const T,
468    end: *const T,
469    remaining: usize,
470    offset: i16,
471    direction: Direction,
472    _marker: PhantomData<&'a T>,
473}
474
475impl<'a, T: Copy, const DEPTH: u8> AnchorIter<'a, T, DEPTH> {
476    #[inline(always)]
477    fn new(ring: &'a SlidingRing<T, DEPTH>, direction: Direction) -> Self {
478        let base = ring.slots.as_ptr();
479        let ptr = unsafe { base.add(ring.anchor as usize) };
480        let end = unsafe { base.add(RING_SIZE) };
481        Self {
482            ptr,
483            base,
484            end,
485            remaining: SlidingRing::<T, DEPTH>::depth_usize(),
486            offset: 0,
487            direction,
488            _marker: PhantomData,
489        }
490    }
491}
492
493impl<'a, T: Copy, const DEPTH: u8> Iterator for AnchorIter<'a, T, DEPTH> {
494    type Item = (i16, &'a T);
495
496    #[inline(always)]
497    fn next(&mut self) -> Option<Self::Item> {
498        if self.remaining == 0 {
499            return None;
500        }
501        self.remaining -= 1;
502        unsafe {
503            let item = &*self.ptr;
504            self.ptr = match self.direction {
505                Direction::Forward => {
506                    let next = self.ptr.add(1);
507                    if next == self.end { self.base } else { next }
508                }
509                Direction::Backward => {
510                    if self.ptr == self.base {
511                        self.end.sub(1)
512                    } else {
513                        self.ptr.sub(1)
514                    }
515                }
516            };
517            let coord = self.offset;
518            match self.direction {
519                Direction::Forward => self.offset = self.offset.wrapping_add(1),
520                Direction::Backward => self.offset = self.offset.wrapping_sub(1),
521            }
522            Some((coord, item))
523        }
524    }
525
526    #[inline(always)]
527    fn size_hint(&self) -> (usize, Option<usize>) {
528        (self.remaining, Some(self.remaining))
529    }
530}
531
532impl<'a, T: Copy, const DEPTH: u8> ExactSizeIterator for AnchorIter<'a, T, DEPTH> {
533    #[inline(always)]
534    fn len(&self) -> usize {
535        self.remaining
536    }
537}
538
539impl<'a, T: Copy, const DEPTH: u8> core::iter::FusedIterator for AnchorIter<'a, T, DEPTH> {}
540
/// Anchor-ordered iterator (mutable).
///
/// Yields `(offset, &mut T)` pairs starting at the anchor and walking the
/// ring once (`DEPTH` items) in the chosen [`Direction`]; offsets are
/// anchor-relative (0, 1, 2, … forward; 0, -1, -2, … backward).
///
/// Uses raw pointers because a wrap-around traversal that hands out
/// `&'a mut T` cannot be expressed through a stored slice borrow in safe Rust.
pub struct AnchorIterMut<'a, T: Copy, const DEPTH: u8> {
    // Next slot to yield.
    ptr: *mut T,
    // First slot of the backing array (wrap target / boundary).
    base: *mut T,
    // One past the last slot of the backing array.
    end: *mut T,
    // Items left to yield; starts at DEPTH (<= 255), so each of the DEPTH
    // distinct ring slots is produced at most once.
    remaining: usize,
    // Anchor-relative offset reported with the next item.
    offset: i16,
    direction: Direction,
    // Ties the raw pointers to the `&'a mut SlidingRing` borrow.
    _marker: PhantomData<&'a mut T>,
}

impl<'a, T: Copy, const DEPTH: u8> AnchorIterMut<'a, T, DEPTH> {
    #[inline(always)]
    fn new(ring: &'a mut SlidingRing<T, DEPTH>, direction: Direction) -> Self {
        let base = ring.slots.as_mut_ptr();
        // SAFETY: `anchor` is a u8 (< RING_SIZE = 256), so `base + anchor`
        // stays inside the allocation, and `base + RING_SIZE` is the allowed
        // one-past-the-end pointer.
        let ptr = unsafe { base.add(ring.anchor as usize) };
        let end = unsafe { base.add(RING_SIZE) };
        Self {
            ptr,
            base,
            end,
            remaining: SlidingRing::<T, DEPTH>::depth_usize(),
            offset: 0,
            direction,
            _marker: PhantomData,
        }
    }
}

impl<'a, T: Copy, const DEPTH: u8> Iterator for AnchorIterMut<'a, T, DEPTH> {
    type Item = (i16, &'a mut T);

    #[inline(always)]
    fn next(&mut self) -> Option<Self::Item> {
        if self.remaining == 0 {
            return None;
        }
        self.remaining -= 1;
        // SAFETY: `ptr` always points at a slot of the array exclusively
        // borrowed for 'a, and `remaining` starts at DEPTH <= 255 < RING_SIZE,
        // so each distinct slot is yielded at most once and the returned
        // `&'a mut T` references never alias.
        // NOTE(review): for zero-sized `T` every slot shares one address;
        // confirm ZST element types are out of scope for this iterator.
        unsafe {
            let item = &mut *self.ptr;
            self.ptr = match self.direction {
                Direction::Forward => {
                    // Advance, rewinding to `base` after the last slot.
                    let next = self.ptr.add(1);
                    if next == self.end { self.base } else { next }
                }
                Direction::Backward => {
                    // Retreat, rewinding to the last slot from the first.
                    if self.ptr == self.base {
                        self.end.sub(1)
                    } else {
                        self.ptr.sub(1)
                    }
                }
            };
            let coord = self.offset;
            match self.direction {
                Direction::Forward => self.offset = self.offset.wrapping_add(1),
                Direction::Backward => self.offset = self.offset.wrapping_sub(1),
            }
            Some((coord, item))
        }
    }

    #[inline(always)]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, T: Copy, const DEPTH: u8> ExactSizeIterator for AnchorIterMut<'a, T, DEPTH> {
    #[inline(always)]
    fn len(&self) -> usize {
        self.remaining
    }
}

impl<'a, T: Copy, const DEPTH: u8> core::iter::FusedIterator for AnchorIterMut<'a, T, DEPTH> {}
617
618/// Reduce a signed difference mod 256 (ring size).
619#[inline]
620pub fn diff_mod_256(diff: i128) -> RingIndex {
621    (diff & 255) as RingIndex
622}
623
#[cfg(test)]
mod tests {
    use super::*;
    // Window size shared by the tests below; small enough to shift past.
    const TEST_DEPTH: u8 = 64;

    // The mod-256 reduction must agree with the canonical Euclidean remainder
    // across the signed range, extremes included.
    #[test]
    fn diff_mod_matches_reference() {
        let xs = [
            i128::MIN,
            i128::MIN / 2,
            -65_536,
            -10_000,
            -1024,
            -256,
            -255,
            -1,
            0,
            1,
            127,
            254,
            255,
            256,
            1024,
            10_000,
            65_536,
            i128::MAX / 2,
            i128::MAX,
        ];
        for &d in &xs {
            let m = diff_mod_256(d);
            let expected = (d.rem_euclid(256)) as u8;
            assert_eq!(m, expected);
        }
    }

    // Anchor-relative reads observe anchor-relative writes.
    #[test]
    fn slot_reads_and_writes_by_offset() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        *ring.borrow_anchor_mut() = 5;
        *ring.slot_mut(1) = 7;
        assert_eq!(*ring.borrow_anchor(), 5);
        assert_eq!(*ring.slot(1), 7);
    }

    // A forward shift of k keeps the first DEPTH-k offsets and clears the k
    // offsets at the tail of the window.
    #[test]
    fn shift_by_forward_clears_tail() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        for off in 0..TEST_DEPTH {
            *ring.slot_mut(off) = 1;
        }
        ring.shift_by(5);
        let depth = TEST_DEPTH as usize;
        for off in 0..(depth - 5) {
            assert_eq!(*ring.slot(off as RingIndex), 1);
        }
        for off in depth - 5..depth {
            assert_eq!(*ring.slot(off as RingIndex), 0);
        }
    }

    // A backward shift of k clears the k offsets at the head of the window
    // and keeps the rest (now shifted up by k).
    #[test]
    fn shift_by_backward_clears_head() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        for off in 0..TEST_DEPTH {
            *ring.slot_mut(off) = 1;
        }
        ring.shift_by(-4);
        for off in 0..4 {
            assert_eq!(*ring.slot(off as RingIndex), 0);
        }
        for off in 4..TEST_DEPTH as usize {
            assert_eq!(*ring.slot(off as RingIndex), 1);
        }
    }

    // Moving by >= DEPTH leaves no overlap, so every slot resets to zero.
    #[test]
    fn shift_by_large_distance_resets_all() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        for off in 0..TEST_DEPTH {
            *ring.slot_mut(off) = 9;
        }
        ring.shift_by(TEST_DEPTH as i128);
        for off in 0..TEST_DEPTH {
            assert_eq!(*ring.slot(off), 0);
        }
    }

    // Data staged just below the window (raw indices 255, 254, 253) becomes
    // reachable after a keep-anchor backward shift: the slot landing on the
    // new anchor survives while the other freshly entered offsets clear.
    #[test]
    fn shift_by_keep_anchor_preserves_anchor_slot() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        let head0 = diff_mod_256(-1);
        let head1 = diff_mod_256(-2);
        let head2 = diff_mod_256(-3);
        *ring.slot_by_index_mut(head0) = 11;
        *ring.slot_by_index_mut(head1) = 22;
        *ring.slot_by_index_mut(head2) = 33;
        ring.shift_by_keep_anchor(-3);
        assert_eq!(*ring.borrow_anchor(), 33);
        assert_eq!(*ring.slot(1), 0);
        assert_eq!(*ring.slot(2), 0);
    }

    // `iter`/`iter_mut` walk raw storage order and visit all RING_SIZE slots.
    #[test]
    fn iterators_cover_all_slots_in_storage_order() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        for (i, slot) in ring.iter_mut().enumerate() {
            *slot = i as u64;
        }
        for (i, slot) in ring.iter().enumerate() {
            assert_eq!(*slot, i as u64);
        }
    }

    // The two spans returned by `slices_from_anchor` begin at the anchor slot
    // and together cover exactly the DEPTH-slot window.
    #[test]
    fn slices_from_anchor_split_correctly() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        for (i, slot) in ring.iter_mut().enumerate() {
            *slot = i as u64;
        }
        ring.shift_by(100);
        *ring.borrow_anchor_mut() = u64::MAX;
        let (tail, head) = ring.slices_from_anchor();
        let window = TEST_DEPTH as usize;
        // Locate the sentinel to recover the anchor's raw index without
        // peeking at private fields.
        let anchor_idx = ring
            .as_slice()
            .iter()
            .position(|&v| v == u64::MAX)
            .expect("sentinel present");
        let expected_tail = window.min(RING_SIZE - anchor_idx);
        assert_eq!(head.len() + tail.len(), window);
        assert_eq!(tail.len(), expected_tail);
        assert_eq!(head.len(), window - expected_tail);
    }

    // Forward anchor iteration reports offsets 0, 1, 2, …
    #[test]
    fn anchor_iter_traverses_forward() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        ring.shift_by(50);
        for (i, slot) in ring.iter_mut().enumerate() {
            *slot = i as u64;
        }
        let mut offsets = Vec::new();
        for (offset, _) in ring.iter_from_anchor(Direction::Forward).take(3) {
            offsets.push(offset);
        }
        assert_eq!(offsets, vec![0, 1, 2]);
    }

    // Backward anchor iteration reports offsets 0, -1, -2, …
    #[test]
    fn anchor_iter_traverses_backward() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        ring.shift_by(50);
        let mut offsets = Vec::new();
        for (offset, _) in ring.iter_from_anchor(Direction::Backward).take(3) {
            offsets.push(offset);
        }
        assert_eq!(offsets, vec![0, -1, -2]);
    }

    // `Option` works as a slot type: the supplied `None` is the clear value.
    #[test]
    fn option_slots_clear_back_to_none() {
        let mut ring = SlidingRing::<Option<u32>, TEST_DEPTH>::new(None);
        *ring.slot_mut(5) = Some(42);
        ring.shift_by(TEST_DEPTH as i128);
        for off in 0..TEST_DEPTH {
            assert!(ring.slot(off).is_none());
        }
    }

    // User-defined Copy types reset to the caller-provided zero value.
    #[test]
    fn custom_copy_slots_reset_to_zero() {
        #[derive(Clone, Copy)]
        struct Level {
            qty: u32,
        }

        let mut ring = SlidingRing::<Level, TEST_DEPTH>::new(Level { qty: 0 });
        ring.slot_mut(3).qty = 7;
        ring.shift_by(TEST_DEPTH as i128);
        assert_eq!(ring.slot(3).qty, 0);
    }

    // Huge deltas wrap the anchor index modulo 256 without overflow.
    #[test]
    fn anchor_index_wraps_modulo() {
        let mut ring = SlidingRing::<u64, TEST_DEPTH>::new(0);
        let diff = i128::MAX - 100;
        ring.shift_by(diff);
        *ring.borrow_anchor_mut() = 7;
        let idx = ring
            .as_slice()
            .iter()
            .position(|&v| v == 7)
            .expect("sentinel should be present");
        assert_eq!(idx as u8, diff_mod_256(diff));
    }
}