// cranpose_core/slot_table.rs
//! Slot table implementation using a gap-buffer strategy.
//!
//! This is the baseline/reference slot storage implementation that provides:
//! - Gap-based slot reuse during conditional rendering
//! - Anchor-based positional stability during reorganization
//! - Efficient group skipping and scope-based recomposition
//! - Batch anchor rebuilding for large structural changes

// Complex slot state machine logic benefits from explicit nested pattern matching for clarity
#![allow(clippy::collapsible_match)]

use crate::{
    slot_storage::{GroupId, SlotStorage, StartGroup, ValueSlotId},
    AnchorId, Key, NodeId, Owned, ScopeId,
};
use std::any::Any;
use std::cell::Cell;

/// Gap-buffer-backed slot storage for the composition runtime.
///
/// NOTE(review): the derived `Default` leaves `next_anchor_id == 0`, which is
/// the INVALID anchor id, while `SlotTable::new()` starts the counter at 1 —
/// confirm `Default` is never used to construct a live table.
#[derive(Default)]
pub struct SlotTable {
    slots: Vec<Slot>, // FUTURE(no_std): replace Vec with arena-backed slot storage.
    /// Slots displaced by `replace_slot_deferred`, dropped later (in reverse)
    /// by `flush_pending_slot_drops`.
    pending_slot_drops: Vec<Slot>,
    /// Current read/write position in `slots` during composition.
    cursor: usize,
    group_stack: Vec<GroupFrame>, // FUTURE(no_std): switch to small stack buffer.
    /// Maps anchor IDs to their current physical positions in the slots array.
    /// This indirection layer provides positional stability during slot reorganization.
    anchors: Vec<usize>, // index = anchor_id.0
    /// Set when anchor positions may be stale and need a rebuild.
    anchors_dirty: bool,
    /// Counter for allocating unique anchor IDs.
    next_anchor_id: Cell<usize>,
    /// Tracks whether the most recent start() reused a gap slot.
    last_start_was_gap: bool,
}
34
/// A single cell in the slot table.
enum Slot {
    /// Header of a keyed composition group spanning `len` slots.
    Group {
        key: Key,
        anchor: AnchorId,
        /// Extent of the group. NOTE(review): `find_group_owner_index` treats
        /// the extent as `idx..idx + len` (header included) — confirm.
        len: usize,
        /// Recomposition scope attached via `set_group_scope`, if any.
        scope: Option<ScopeId>,
        /// True when some of this group's children were marked as gaps.
        has_gap_children: bool,
    },
    /// A remembered value, stored type-erased (see `as_value`).
    Value {
        anchor: AnchorId,
        data: Box<dyn Any>,
    },
    /// A reference to an emitted node.
    Node {
        anchor: AnchorId,
        id: NodeId,
    },
    /// Gap: Marks an unused slot that can be reused or compacted.
    /// This prevents destructive truncation that would destroy sibling components.
    /// For Groups marked as gaps (e.g., during tab switching), we preserve their
    /// key, scope, and length so they can be properly matched and reused when reactivated.
    Gap {
        anchor: AnchorId,
        /// If this gap was a Group, preserve its key for reuse matching
        group_key: Option<Key>,
        /// If this gap was a Group, preserve its scope ID for state subscription continuity
        group_scope: Option<ScopeId>,
        /// If this gap was a Group, preserve its length to restore the full group structure
        group_len: usize,
    },
}
65
/// Bookkeeping for a group currently open on the composition stack.
struct GroupFrame {
    key: Key,
    start: usize, // Physical position (will be phased out)
    end: usize,   // Physical position (will be phased out)
    /// Set when children under this frame must recompose (e.g. after gap reuse).
    force_children_recompose: bool,
}
72
/// Sentinel stored in `SlotTable::anchors` for ids with no current position.
const INVALID_ANCHOR_POS: usize = usize::MAX;
74
/// Discriminant-only view of `Slot` (see `Slot::kind`), handy for comparisons.
#[derive(Debug, PartialEq)]
enum SlotKind {
    Group,
    Value,
    Node,
    Gap,
}
82
83impl Slot {
84    fn kind(&self) -> SlotKind {
85        match self {
86            Slot::Group { .. } => SlotKind::Group,
87            Slot::Value { .. } => SlotKind::Value,
88            Slot::Node { .. } => SlotKind::Node,
89            Slot::Gap { .. } => SlotKind::Gap,
90        }
91    }
92
93    /// Get the anchor ID for this slot.
94    fn anchor_id(&self) -> AnchorId {
95        match self {
96            Slot::Group { anchor, .. } => *anchor,
97            Slot::Value { anchor, .. } => *anchor,
98            Slot::Node { anchor, .. } => *anchor,
99            Slot::Gap { anchor, .. } => *anchor,
100        }
101    }
102
103    fn as_value<T: 'static>(&self) -> &T {
104        match self {
105            Slot::Value { data, .. } => data.downcast_ref::<T>().expect("slot value type mismatch"),
106            _ => panic!("slot is not a value"),
107        }
108    }
109
110    fn as_value_mut<T: 'static>(&mut self) -> &mut T {
111        match self {
112            Slot::Value { data, .. } => data.downcast_mut::<T>().expect("slot value type mismatch"),
113            _ => panic!("slot is not a value"),
114        }
115    }
116}
117
impl Default for Slot {
    /// Placeholder slot: a zero-length, unanchored, scopeless group.
    /// NOTE(review): no caller is visible in this chunk — presumably used for
    /// `mem::take`-style replacement; confirm before relying on it.
    fn default() -> Self {
        Slot::Group {
            key: 0,
            anchor: AnchorId::INVALID,
            len: 0,
            scope: None,
            has_gap_children: false,
        }
    }
}
129
130fn drop_slots_in_reverse(slots: &mut Vec<Slot>) {
131    let _teardown = crate::runtime::enter_state_teardown_scope();
132    while let Some(slot) = slots.pop() {
133        drop(slot);
134    }
135}
136
137impl SlotTable {
    /// Number of gap slots pre-allocated the first time the table is used.
    const INITIAL_CAP: usize = 32;
    /// Upper bound on how far `find_right_gap_run` scans for reusable gaps. // tune
    const LOCAL_GAP_SCAN: usize = 256;
140
141    pub fn new() -> Self {
142        Self {
143            slots: Vec::new(),
144            pending_slot_drops: Vec::new(),
145            cursor: 0,
146            group_stack: Vec::new(),
147            anchors: Vec::new(),
148            anchors_dirty: false,
149            next_anchor_id: Cell::new(1), // Start at 1 (0 is INVALID)
150            last_start_was_gap: false,
151        }
152    }
153
154    pub fn current_group(&self) -> usize {
155        self.group_stack.last().map(|f| f.start).unwrap_or(0)
156    }
157
158    pub fn group_key(&self, index: usize) -> Option<Key> {
159        match self.slots.get(index) {
160            Some(Slot::Group { key, .. }) => Some(*key),
161            Some(Slot::Gap { group_key, .. }) => *group_key,
162            _ => None,
163        }
164    }
165
166    fn ensure_capacity(&mut self) {
167        if self.slots.is_empty() {
168            self.slots.reserve(Self::INITIAL_CAP);
169            self.append_gap_slots(Self::INITIAL_CAP);
170        } else if self.cursor == self.slots.len() {
171            self.grow_slots();
172        }
173    }
174
    /// Overwrite the slot at `cursor` with a fresh, unanchored gap.
    /// The displaced slot is queued for deferred drop, not dropped in place.
    fn force_gap_here(&mut self, cursor: usize) {
        // we *know* we have capacity (ensure_capacity() already ran)
        // so just overwrite the slot at cursor with a fresh gap
        self.replace_slot_deferred(
            cursor,
            Slot::Gap {
                anchor: AnchorId::INVALID,
                group_key: None,
                group_scope: None,
                group_len: 0,
            },
        );
    }
188
189    fn find_right_gap_run(&self, from: usize, scan_limit: usize) -> Option<(usize, usize)> {
190        let end = (from + scan_limit).min(self.slots.len());
191        let mut i = from;
192        while i < end {
193            if let Some(Slot::Gap { anchor, .. }) = self.slots.get(i) {
194                if *anchor == AnchorId::INVALID {
195                    let start = i;
196                    let mut len = 1;
197                    while i + len < end {
198                        match self.slots.get(i + len) {
199                            Some(Slot::Gap { anchor, .. }) if *anchor == AnchorId::INVALID => {
200                                len += 1;
201                            }
202                            _ => break,
203                        }
204                    }
205                    return Some((start, len));
206                }
207            }
208            i += 1;
209        }
210        None
211    }
212
    /// Guarantee the slot at `cursor` is a gap, preferring to rotate a nearby
    /// run of free gaps into place over destroying the current occupant.
    fn ensure_gap_at_local(&mut self, cursor: usize) {
        // Already a gap (anchored or not): nothing to do.
        if matches!(self.slots.get(cursor), Some(Slot::Gap { .. })) {
            return;
        }
        self.ensure_capacity();

        if let Some((run_start, run_len)) = self.find_right_gap_run(cursor, Self::LOCAL_GAP_SCAN) {
            // Rotate the free-gap run to `cursor`, shifting the occupants of
            // [cursor, run_start) right by `run_len`.
            // NOTE(review): both shift helpers are keyed only on `cursor`, yet
            // frames/anchors at or beyond `run_start + run_len` did not move —
            // confirm the helpers (or a later anchor rebuild) account for that.
            self.shift_group_frames(cursor, run_len as isize);
            self.shift_anchor_positions_from(cursor, run_len as isize);
            self.slots[cursor..run_start + run_len].rotate_right(run_len);
            return;
        }

        // No nearby free run: sacrifice the occupant at `cursor`.
        self.force_gap_here(cursor);
    }
228
229    fn append_gap_slots(&mut self, count: usize) {
230        if count == 0 {
231            return;
232        }
233        for _ in 0..count {
234            self.slots.push(Slot::Gap {
235                anchor: AnchorId::INVALID,
236                group_key: None,
237                group_scope: None,
238                group_len: 0,
239            });
240        }
241    }
242
243    fn grow_slots(&mut self) {
244        let old_len = self.slots.len();
245        let target_len = (old_len.saturating_mul(2)).max(Self::INITIAL_CAP);
246        let additional = target_len.saturating_sub(old_len);
247        if additional == 0 {
248            return;
249        }
250        self.slots.reserve(additional);
251        self.append_gap_slots(additional);
252    }
253
    /// Allocate a new unique anchor ID.
    /// Uses a `Cell` counter so allocation works through `&self`.
    fn allocate_anchor(&self) -> AnchorId {
        let id = self.next_anchor_id.get();
        self.next_anchor_id.set(id + 1);
        AnchorId(id)
    }
260
    /// Swap `slot` into position `index`, deferring the drop of the old value.
    /// The retired slot is released later by `flush_pending_slot_drops`, which
    /// runs drops inside a state-teardown scope.
    fn replace_slot_deferred(&mut self, index: usize, slot: Slot) {
        let old = std::mem::replace(&mut self.slots[index], slot);
        self.pending_slot_drops.push(old);
    }
265
    /// Drop all slots retired by `replace_slot_deferred`, newest first,
    /// inside a state-teardown scope.
    fn flush_pending_slot_drops(&mut self) {
        drop_slots_in_reverse(&mut self.pending_slot_drops);
    }
269
    /// Register an anchor at a specific position in the slots array.
    /// Debug builds assert validity; release builds silently ignore the
    /// INVALID id (0). The position table grows on demand.
    fn register_anchor(&mut self, anchor: AnchorId, position: usize) {
        debug_assert!(anchor.is_valid(), "attempted to register invalid anchor");
        let idx = anchor.0;
        if idx == 0 {
            // AnchorId(0) is the INVALID sentinel; never track it.
            return;
        }
        if idx >= self.anchors.len() {
            self.anchors.resize(idx + 1, INVALID_ANCHOR_POS);
        }
        self.anchors[idx] = position;
    }
282
283    /// Returns whether the most recent `start` invocation reused a gap slot.
284    /// Resets the flag to false after reading.
285    fn take_last_start_was_gap(&mut self) -> bool {
286        let was_gap = self.last_start_was_gap;
287        self.last_start_was_gap = false;
288        was_gap
289    }
290
291    fn find_group_owner_index(&self, start: usize) -> Option<usize> {
292        if start == 0 || self.slots.is_empty() {
293            return None;
294        }
295
296        let mut idx = start.saturating_sub(1);
297        loop {
298            if let Some(Slot::Group { len, .. }) = self.slots.get(idx) {
299                let extent_end = idx.saturating_add(*len);
300                if start < extent_end {
301                    return Some(idx);
302                }
303            }
304            if idx == 0 {
305                break;
306            }
307            idx -= 1;
308        }
309        None
310    }
311
312    /// Resolve an anchor to its current position in the slots array.
313    fn resolve_anchor(&self, anchor: AnchorId) -> Option<usize> {
314        let idx = anchor.0;
315        if idx == 0 {
316            return None;
317        }
318        self.anchors
319            .get(idx)
320            .copied()
321            .filter(|&pos| pos != INVALID_ANCHOR_POS)
322    }
323
    /// Mark a range of slots as gaps instead of truncating.
    /// This preserves sibling components while allowing structure changes.
    /// When encountering a Group, recursively marks the entire group structure as gaps.
    ///
    /// `owner_index` optionally names the group owning `[start, end)`; when
    /// absent the owner is discovered by scanning backwards. Returns `true`
    /// if at least one slot was converted.
    pub fn mark_range_as_gaps(
        &mut self,
        start: usize,
        end: usize,
        owner_index: Option<usize>,
    ) -> bool {
        let mut i = start;
        let end = end.min(self.slots.len());
        let mut marked_any = false;
        // Displaced slots are collected and dropped together, in reverse,
        // inside the teardown scope that drop_slots_in_reverse enters.
        let mut retired_slots = Vec::new();

        while i < end {
            if i >= self.slots.len() {
                break;
            }

            // Capture the metadata to carry into the replacement gap.
            let (anchor, group_len, group_key, group_scope) = {
                let slot = &self.slots[i];
                let anchor = slot.anchor_id();
                let (group_len, group_key, group_scope) = match slot {
                    Slot::Group {
                        len, key, scope, ..
                    } => (*len, Some(*key), *scope),
                    // Also preserve metadata for existing Gaps!
                    // This is essential for the decrease-increase scenario where gaps
                    // are re-marked as gaps but need to retain their original keys.
                    Slot::Gap {
                        group_len,
                        group_key,
                        group_scope,
                        ..
                    } => (*group_len, *group_key, *group_scope),
                    _ => (0, None, None),
                };
                (anchor, group_len, group_key, group_scope)
            };

            // Mark this slot as a gap, preserving Group metadata if it was a Group
            // This allows Groups to be properly matched and reused during tab switching
            retired_slots.push(std::mem::replace(
                &mut self.slots[i],
                Slot::Gap {
                    anchor,
                    group_key,
                    group_scope,
                    group_len,
                },
            ));
            marked_any = true;

            // If it was a group, recursively mark its children as gaps too
            if group_len > 0 {
                // Mark children (from i+1 to i+group_len)
                let children_end = (i + group_len).min(end);
                for j in (i + 1)..children_end {
                    if j < self.slots.len() {
                        // For nested Groups, preserve their metadata as well
                        if let Slot::Group {
                            key: nested_key,
                            scope: nested_scope,
                            len: nested_len,
                            ..
                        } = self.slots[j]
                        {
                            let child_anchor = self.slots[j].anchor_id();
                            retired_slots.push(std::mem::replace(
                                &mut self.slots[j],
                                Slot::Gap {
                                    anchor: child_anchor,
                                    group_key: Some(nested_key),
                                    group_scope: nested_scope,
                                    group_len: nested_len,
                                },
                            ));
                            marked_any = true;
                        } else {
                            // For Nodes and other slots, mark as regular gaps
                            let child_anchor = self.slots[j].anchor_id();
                            retired_slots.push(std::mem::replace(
                                &mut self.slots[j],
                                Slot::Gap {
                                    anchor: child_anchor,
                                    group_key: None,
                                    group_scope: None,
                                    group_len: 0,
                                },
                            ));
                            marked_any = true;
                        }
                    }
                }
                // Skip past the whole group extent (at least one slot).
                i = (i + group_len).max(i + 1);
            } else {
                i += 1;
            }
        }

        drop_slots_in_reverse(&mut retired_slots);
        if marked_any {
            // Flag the owning group so the next start() forces its children
            // to recompose rather than taking the fast path.
            let owner_idx = owner_index.or_else(|| self.find_group_owner_index(start));
            if let Some(idx) = owner_idx {
                if let Some(Slot::Group {
                    has_gap_children, ..
                }) = self.slots.get_mut(idx)
                {
                    *has_gap_children = true;
                }
                if let Some(frame) = self.group_stack.iter_mut().find(|frame| frame.start == idx) {
                    frame.force_children_recompose = true;
                }
            }
        }
        marked_any
    }
441
442    pub fn get_group_scope(&self, index: usize) -> Option<ScopeId> {
443        let slot = self
444            .slots
445            .get(index)
446            .expect("get_group_scope: index out of bounds");
447        match slot {
448            Slot::Group { scope, .. } => *scope,
449            _ => None,
450        }
451    }
452
453    pub fn set_group_scope(&mut self, index: usize, scope: ScopeId) {
454        let slot = self
455            .slots
456            .get_mut(index)
457            .expect("set_group_scope: index out of bounds");
458        match slot {
459            Slot::Group {
460                scope: scope_opt, ..
461            } => {
462                // With gaps implementation, Groups can be reused across compositions.
463                // Always update the scope to the current value.
464                *scope_opt = Some(scope);
465            }
466            _ => panic!("set_group_scope: slot at index is not a group"),
467        }
468    }
469
470    pub fn find_group_index_by_scope(&self, scope: ScopeId) -> Option<usize> {
471        self.slots
472            .iter()
473            .enumerate()
474            .find_map(|(i, slot)| match slot {
475                Slot::Group {
476                    scope: Some(id), ..
477                } if *id == scope => Some(i),
478                _ => None,
479            })
480    }
481
    /// Begin recomposition at the group owned by `scope`, returning its index,
    /// or `None` if no group currently carries that scope.
    /// NOTE(review): the "recranpose" spelling follows the crate name
    /// ("cranpose") — confirm it is intentional before any rename.
    pub fn start_recranpose_at_scope(&mut self, scope: ScopeId) -> Option<usize> {
        let index = self.find_group_index_by_scope(scope)?;
        self.start_recompose(index);
        Some(index)
    }
487
488    pub fn debug_dump_groups(&self) -> Vec<(usize, Key, Option<ScopeId>, usize)> {
489        self.slots
490            .iter()
491            .enumerate()
492            .filter_map(|(i, slot)| match slot {
493                Slot::Group {
494                    key, len, scope, ..
495                } => Some((i, *key, *scope, *len)),
496                _ => None,
497            })
498            .collect()
499    }
500
501    pub fn debug_dump_all_slots(&self) -> Vec<(usize, String)> {
502        self.slots
503            .iter()
504            .enumerate()
505            .map(|(i, slot)| {
506                let kind = match slot {
507                    Slot::Group {
508                        key, scope, len, ..
509                    } => format!("Group(key={:?}, scope={:?}, len={})", key, scope, len),
510                    Slot::Value { .. } => "Value".to_string(),
511                    Slot::Node { id, .. } => format!("Node(id={:?})", id),
512                    Slot::Gap {
513                        group_key,
514                        group_scope,
515                        ..
516                    } => {
517                        if let Some(key) = group_key {
518                            format!("Gap(was_group_key={:?}, scope={:?})", key, group_scope)
519                        } else {
520                            "Gap".to_string()
521                        }
522                    }
523                };
524                (i, kind)
525            })
526            .collect()
527    }
528
529    fn update_group_bounds(&mut self) {
530        for frame in &mut self.group_stack {
531            if frame.end < self.cursor {
532                frame.end = self.cursor;
533            }
534        }
535    }
536
537    /// Update all anchor positions to match their current physical positions in the slots array.
538    /// This should be called after any operation that modifies slot positions (insert, remove, etc.)
539    fn rebuild_all_anchor_positions(&mut self) {
540        let mut max_anchor = 0usize;
541        for slot in &self.slots {
542            let idx = slot.anchor_id().0;
543            if idx > max_anchor {
544                max_anchor = idx;
545            }
546        }
547        if self.anchors.len() <= max_anchor {
548            self.anchors.resize(max_anchor + 1, INVALID_ANCHOR_POS);
549        }
550
551        for pos in &mut self.anchors {
552            *pos = INVALID_ANCHOR_POS;
553        }
554
555        for (position, slot) in self.slots.iter().enumerate() {
556            let idx = slot.anchor_id().0;
557            if idx == 0 {
558                continue;
559            }
560            self.anchors[idx] = position;
561        }
562    }
563
    /// Shift the cached physical bounds of open group frames after slots at
    /// `index` and beyond moved by `delta` positions.
    fn shift_group_frames(&mut self, index: usize, delta: isize) {
        if delta == 0 {
            return;
        }
        if delta > 0 {
            let delta = delta as usize;
            for frame in &mut self.group_stack {
                if frame.start >= index {
                    // Whole frame sits at/after the shift point: move both ends.
                    frame.start += delta;
                    frame.end += delta;
                } else if frame.end >= index {
                    // Frame straddles the shift point: only its end moves.
                    frame.end += delta;
                }
            }
        } else {
            let delta = (-delta) as usize;
            for frame in &mut self.group_stack {
                if frame.start >= index {
                    frame.start = frame.start.saturating_sub(delta);
                    frame.end = frame.end.saturating_sub(delta);
                } else if frame.end > index {
                    // NOTE(review): the positive branch compares `end >= index`
                    // but this branch uses `end > index` — confirm the
                    // asymmetry is deliberate and not an off-by-one.
                    frame.end = frame.end.saturating_sub(delta);
                }
            }
        }
    }
590
591    pub fn start(&mut self, key: Key) -> usize {
592        self.ensure_capacity();
593
594        let cursor = self.cursor;
595        let parent_force = self
596            .group_stack
597            .last()
598            .map(|frame| frame.force_children_recompose)
599            .unwrap_or(false);
600
601        // === FAST PATH =======================================================
602        if let Some(Slot::Group {
603            key: existing_key,
604            len,
605            has_gap_children,
606            ..
607        }) = self.slots.get(cursor)
608        {
609            // Only fast-path if:
610            // 1) key matches
611            // 2) there were NO gap children before
612            // 3) parent is NOT forcing children to recompose
613            if *existing_key == key && !*has_gap_children && !parent_force {
614                self.last_start_was_gap = false;
615
616                let frame = GroupFrame {
617                    key,
618                    start: cursor,
619                    end: cursor + *len,
620                    force_children_recompose: false,
621                };
622                self.group_stack.push(frame);
623                self.cursor = cursor + 1;
624                self.update_group_bounds();
625                return cursor;
626            }
627        }
628
629        // if parent says "my children are unstable", don't try to be clever
630        if parent_force {
631            if let Some(Slot::Group {
632                key: existing_key,
633                len,
634                has_gap_children,
635                ..
636            }) = self.slots.get_mut(cursor)
637            {
638                if *existing_key == key {
639                    *has_gap_children = false;
640                    self.last_start_was_gap = true;
641                    let frame = GroupFrame {
642                        key,
643                        start: cursor,
644                        end: cursor + *len,
645                        force_children_recompose: true,
646                    };
647                    self.group_stack.push(frame);
648                    self.cursor = cursor + 1;
649                    self.update_group_bounds();
650                    return cursor;
651                }
652            }
653
654            if let Some(Slot::Gap {
655                anchor,
656                group_key: Some(gap_key),
657                group_scope,
658                group_len,
659            }) = self.slots.get(cursor)
660            {
661                if *gap_key == key {
662                    let anchor = *anchor;
663                    let gap_len = *group_len;
664                    let preserved_scope = *group_scope;
665                    self.slots[cursor] = Slot::Group {
666                        key,
667                        anchor,
668                        len: gap_len,
669                        scope: preserved_scope,
670                        has_gap_children: false,
671                    };
672                    self.register_anchor(anchor, cursor);
673                    self.last_start_was_gap = true;
674                    let frame = GroupFrame {
675                        key,
676                        start: cursor,
677                        end: cursor + gap_len,
678                        force_children_recompose: true,
679                    };
680                    self.group_stack.push(frame);
681                    self.cursor = cursor + 1;
682                    self.update_group_bounds();
683                    return cursor;
684                }
685            }
686
687            return self.insert_new_group_at_cursor(key);
688        }
689
690        self.last_start_was_gap = false;
691        let cursor = self.cursor;
692        debug_assert!(
693            cursor <= self.slots.len(),
694            "slot cursor {} out of bounds",
695            cursor
696        );
697
698        if cursor == self.slots.len() {
699            self.grow_slots();
700        }
701
702        debug_assert!(
703            cursor < self.slots.len(),
704            "slot cursor {} failed to grow",
705            cursor
706        );
707
708        if cursor > 0 && !matches!(self.slots.get(cursor), Some(Slot::Gap { .. })) {
709            if let Some(Slot::Group { key: prev_key, .. }) = self.slots.get(cursor - 1) {
710                if *prev_key == key {
711                    return self.insert_new_group_at_cursor(key);
712                }
713            }
714        }
715
716        // Check if we can reuse an existing Group at the cursor position without scanning.
717        if let Some(slot) = self.slots.get_mut(cursor) {
718            if let Slot::Group {
719                key: existing_key,
720                len,
721                has_gap_children,
722                ..
723            } = slot
724            {
725                if *existing_key == key {
726                    let group_len = *len;
727                    let had_gap_children = *has_gap_children;
728                    if had_gap_children {
729                        *has_gap_children = false;
730                    }
731                    let force_children = had_gap_children || parent_force;
732
733                    self.last_start_was_gap = false;
734                    let frame = GroupFrame {
735                        key,
736                        start: cursor,
737                        end: cursor + group_len,
738                        force_children_recompose: force_children,
739                    };
740                    self.group_stack.push(frame);
741                    self.cursor = cursor + 1;
742                    self.update_group_bounds();
743                    return cursor;
744                }
745            }
746        }
747
748        // Check if we can reuse an existing Group/GAP by converting in-place.
749        let mut reused_from_gap = false;
750        let reuse_result = match self.slots.get(cursor) {
751            // If there's already a Group here but with a different key, mark it as a gap
752            // instead of recycling it. This preserves the group so it can be found and
753            // restored later (critical for the decrease-increase and tab-switching scenarios).
754            // Then fall through to the search logic to find the desired key.
755            Some(Slot::Group {
756                key: existing_key,
757                anchor: old_anchor,
758                len: old_len,
759                scope: old_scope,
760                has_gap_children: _,
761            }) if *existing_key != key => {
762                // Copy all values to avoid borrow checker issues
763                let old_key = *existing_key;
764                let old_anchor_val = *old_anchor;
765                let old_len_val = *old_len;
766                let old_scope_val = *old_scope;
767
768                // Mark the group's children as gaps so they can be reused safely
769                let group_len = old_len_val.max(1);
770                if group_len > 1 {
771                    let start = cursor + 1;
772                    let end = cursor + group_len;
773                    let _ = self.mark_range_as_gaps(start, end, Some(cursor));
774                }
775
776                // Mark this group as a gap, preserving all its metadata
777                self.replace_slot_deferred(
778                    cursor,
779                    Slot::Gap {
780                        anchor: old_anchor_val,
781                        group_key: Some(old_key),
782                        group_scope: old_scope_val,
783                        group_len: old_len_val,
784                    },
785                );
786                // Don't return early - fall through to search logic
787                None
788            }
789            // Also check for Gaps that were Groups with matching keys!
790            // This enables tab switching to reuse Groups that were marked as gaps.
791            Some(Slot::Gap {
792                anchor,
793                group_key: Some(gap_key),
794                group_scope,
795                group_len,
796            }) if *gap_key == key => {
797                // Convert the Gap back to a Group, preserving its scope and length
798                reused_from_gap = true;
799                Some((*group_len, *anchor, *group_scope))
800            }
801            Some(_slot) => None,
802            None => None,
803        };
804        if let Some((len, group_anchor, preserved_scope)) = reuse_result {
805            // Convert Gap back to Group if needed
806            if matches!(self.slots.get(cursor), Some(Slot::Gap { .. })) {
807                self.slots[cursor] = Slot::Group {
808                    key,
809                    anchor: group_anchor,
810                    len,
811                    scope: preserved_scope,
812                    has_gap_children: false,
813                };
814                self.register_anchor(group_anchor, cursor);
815            }
816
817            if reused_from_gap {
818                if let Some(Slot::Group {
819                    has_gap_children, ..
820                }) = self.slots.get_mut(cursor)
821                {
822                    *has_gap_children = false;
823                }
824            }
825
826            self.last_start_was_gap = reused_from_gap || parent_force;
827            let frame = GroupFrame {
828                key,
829                start: cursor,
830                end: cursor + len,
831                force_children_recompose: reused_from_gap || parent_force,
832            };
833            self.group_stack.push(frame);
834            self.cursor = cursor + 1;
835            self.update_group_bounds();
836            return cursor;
837        }
838
839        let allow_rescue = !parent_force && cursor < self.slots.len().saturating_sub(1);
840        if !allow_rescue {
841            return self.insert_new_group_at_cursor(key);
842        }
843
844        // When a group is restored from a gap, its preserved length may not accurately
845        // reflect where child groups/gaps are now located. To prevent duplicate group
846        // creation, we need to search beyond the preserved extent.
847        //
848        // Strategy: Search up to parent's end first (respects group hierarchy), but
849        // if that's less than slots.len(), we'll continue searching in a second pass
850        // below to find gaps that should be restored.
851        let parent_end = self
852            .group_stack
853            .last()
854            .map(|frame| frame.end.min(self.slots.len()))
855            .unwrap_or(self.slots.len());
856
857        // If parent_end seems constrained (less than slots length), we'll do an
858        // extended search for gaps after the initial search fails.
859        let needs_extended_search = parent_end < self.slots.len();
860
861        let mut search_index = cursor;
862        let mut found_group: Option<(usize, AnchorId, usize, Option<ScopeId>)> = None;
863        const SEARCH_BUDGET: usize = 16;
864        let mut scanned = 0usize;
865        while search_index < parent_end && scanned < SEARCH_BUDGET {
866            scanned += 1;
867            match self.slots.get(search_index) {
868                Some(Slot::Group {
869                    key: existing_key,
870                    anchor,
871                    len,
872                    scope: _,
873                    ..
874                }) => {
875                    let group_len = *len;
876                    if *existing_key == key {
877                        found_group = Some((search_index, *anchor, group_len, None));
878                        break;
879                    }
880                    // Search this group's children for gaps with matching keys
881                    // IMPORTANT: For gaps, we must search recursively inside them because
882                    // gaps can contain nested gaps after multiple decrease-increase cycles
883                    let mut child_index = search_index + 1;
884                    let search_limit = (search_index + group_len).min(self.slots.len());
885                    while child_index < search_limit {
886                        match self.slots.get(child_index) {
887                            Some(Slot::Gap {
888                                anchor: gap_anchor,
889                                group_key: Some(gap_key),
890                                group_scope,
891                                group_len: gap_len,
892                            }) if *gap_key == key => {
893                                // Found a matching gap!
894                                found_group =
895                                    Some((child_index, *gap_anchor, *gap_len, *group_scope));
896                                break;
897                            }
898                            Some(Slot::Gap {
899                                group_len: gap_len, ..
900                            }) => {
901                                // Skip this gap - we don't move deeply nested gaps
902                                // They will be restored when their parent gap is converted to a group
903                                child_index += (*gap_len).max(1);
904                            }
905                            Some(Slot::Group {
906                                len: nested_len, ..
907                            }) => {
908                                // Skip active groups (don't search inside them)
909                                child_index += (*nested_len).max(1);
910                            }
911                            _ => {
912                                child_index += 1;
913                            }
914                        }
915                    }
916                    if found_group.is_some() {
917                        break;
918                    }
919                    let advance = group_len.max(1);
920                    search_index = search_index.saturating_add(advance);
921                }
922                // Also search for Gaps that were Groups with matching keys
923                Some(Slot::Gap {
924                    anchor,
925                    group_key: Some(gap_key),
926                    group_scope,
927                    group_len,
928                }) => {
929                    if *gap_key == key {
930                        found_group = Some((search_index, *anchor, *group_len, *group_scope));
931                        break;
932                    }
933                    // Search this gap's children for gaps with matching keys
934                    // IMPORTANT: For nested gaps, search recursively inside them
935                    let gap_len_val = *group_len;
936                    let mut child_index = search_index + 1;
937                    let search_limit = (search_index + gap_len_val).min(self.slots.len());
938                    while child_index < search_limit {
939                        match self.slots.get(child_index) {
940                            Some(Slot::Gap {
941                                anchor: child_gap_anchor,
942                                group_key: Some(child_gap_key),
943                                group_scope: child_group_scope,
944                                group_len: child_gap_len,
945                            }) if *child_gap_key == key => {
946                                // Found a matching gap!
947                                found_group = Some((
948                                    child_index,
949                                    *child_gap_anchor,
950                                    *child_gap_len,
951                                    *child_group_scope,
952                                ));
953                                break;
954                            }
955                            Some(Slot::Gap {
956                                group_len: nested_gap_len,
957                                ..
958                            }) => {
959                                // Skip this gap - we don't move deeply nested gaps
960                                child_index += (*nested_gap_len).max(1);
961                            }
962                            Some(Slot::Group {
963                                len: nested_len, ..
964                            }) => {
965                                // Skip active groups (don't search inside them)
966                                child_index += (*nested_len).max(1);
967                            }
968                            _ => {
969                                child_index += 1;
970                            }
971                        }
972                    }
973                    if found_group.is_some() {
974                        break;
975                    }
976                    let advance = gap_len_val.max(1);
977                    search_index = search_index.saturating_add(advance);
978                }
979                Some(_slot) => {
980                    search_index += 1;
981                }
982                None => break,
983            }
984        }
985
986        // Extended search: If we didn't find the group within parent_end, but there are
987        // more slots to search, look for BOTH groups and gaps with matching keys.
988        // This handles the recursive decrease-increase case where a parent gap is restored
989        // but its children (which may be groups or gaps) are still at their original
990        // positions beyond the parent's preserved extent.
991        //
992        // IMPORTANT: After multiple decrease-increase cycles, gaps can drift beyond
993        // the parent's extent. We search all remaining slots to ensure we find the
994        // matching group/gap, regardless of structure complexity.
995        if found_group.is_none() && needs_extended_search {
996            search_index = parent_end;
997            const EXTENDED_SEARCH_BUDGET: usize = 16;
998            let mut extended_scanned = 0usize;
999            while search_index < self.slots.len() && extended_scanned < EXTENDED_SEARCH_BUDGET {
1000                extended_scanned += 1;
1001                match self.slots.get(search_index) {
1002                    // Search for groups with matching keys (but only within limited range)
1003                    Some(Slot::Group {
1004                        key: existing_key,
1005                        anchor,
1006                        len,
1007                        scope,
1008                        ..
1009                    }) => {
1010                        if *existing_key == key {
1011                            found_group = Some((search_index, *anchor, *len, *scope));
1012                            break;
1013                        }
1014                        let advance = (*len).max(1);
1015                        search_index = search_index.saturating_add(advance);
1016                    }
1017                    // Search for gaps with matching keys (candidates for restoration)
1018                    Some(Slot::Gap {
1019                        anchor,
1020                        group_key: Some(gap_key),
1021                        group_scope,
1022                        group_len,
1023                    }) => {
1024                        if *gap_key == key {
1025                            found_group = Some((search_index, *anchor, *group_len, *group_scope));
1026                            break;
1027                        }
1028                        let advance = (*group_len).max(1);
1029                        search_index = search_index.saturating_add(advance);
1030                    }
1031                    Some(_slot) => {
1032                        search_index += 1;
1033                    }
1034                    None => break,
1035                }
1036            }
1037        }
1038
1039        if let Some((found_index, group_anchor, group_len, preserved_scope)) = found_group {
1040            // If we found a Gap, convert it back to a Group first
1041            let reused_gap = matches!(self.slots.get(found_index), Some(Slot::Gap { .. }));
1042            if reused_gap {
1043                self.slots[found_index] = Slot::Group {
1044                    key,
1045                    anchor: group_anchor,
1046                    len: group_len,
1047                    scope: preserved_scope,
1048                    has_gap_children: false,
1049                };
1050            }
1051
1052            self.last_start_was_gap = reused_gap || parent_force;
1053            let group_extent = group_len.max(1);
1054            let available = self.slots.len().saturating_sub(found_index);
1055            let actual_len = group_extent.min(available);
1056            if actual_len > 0 {
1057                self.shift_group_frames(found_index, -(actual_len as isize));
1058                let moved: Vec<_> = self
1059                    .slots
1060                    .drain(found_index..found_index + actual_len)
1061                    .collect();
1062                self.shift_group_frames(cursor, moved.len() as isize);
1063                self.slots.splice(cursor..cursor, moved);
1064                // Update all anchor positions after group move
1065                self.anchors_dirty = true;
1066                let frame = GroupFrame {
1067                    key,
1068                    start: cursor,
1069                    end: cursor + actual_len,
1070                    force_children_recompose: reused_gap || parent_force,
1071                };
1072                self.group_stack.push(frame);
1073                self.cursor = cursor + 1;
1074                self.update_group_bounds();
1075                return cursor;
1076            } else {
1077                // If the stored length exceeds available slots, fall back to inserting a new group.
1078                self.shift_group_frames(found_index, 0);
1079            }
1080        }
1081
1082        self.insert_new_group_at_cursor(key)
1083    }
1084
1085    fn insert_new_group_at_cursor(&mut self, key: Key) -> usize {
1086        // make sure we have space at the tail for pulling gaps
1087        self.ensure_capacity();
1088
1089        let cursor = self.cursor;
1090        self.ensure_gap_at_local(cursor);
1091        let parent_force = self
1092            .group_stack
1093            .last()
1094            .map(|frame| frame.force_children_recompose)
1095            .unwrap_or(false);
1096
1097        if cursor < self.slots.len() {
1098            debug_assert!(matches!(self.slots[cursor], Slot::Gap { .. }));
1099            let group_anchor = self.allocate_anchor();
1100            self.slots[cursor] = Slot::Group {
1101                key,
1102                anchor: group_anchor,
1103                len: 0,
1104                scope: None,
1105                has_gap_children: false,
1106            };
1107            self.register_anchor(group_anchor, cursor);
1108        } else {
1109            let group_anchor = self.allocate_anchor();
1110            self.slots.push(Slot::Group {
1111                key,
1112                anchor: group_anchor,
1113                len: 0,
1114                scope: None,
1115                has_gap_children: false,
1116            });
1117            self.register_anchor(group_anchor, cursor);
1118        }
1119        self.last_start_was_gap = parent_force;
1120        self.cursor = cursor + 1;
1121        self.group_stack.push(GroupFrame {
1122            key,
1123            start: cursor,
1124            end: self.cursor,
1125            force_children_recompose: parent_force,
1126        });
1127        self.update_group_bounds();
1128        cursor
1129    }
1130    fn shift_anchor_positions_from(&mut self, start_slot: usize, delta: isize) {
1131        for pos in &mut self.anchors {
1132            if *pos != INVALID_ANCHOR_POS && *pos >= start_slot {
1133                *pos = (*pos as isize + delta) as usize;
1134            }
1135        }
1136    }
1137    fn flush_anchors_if_dirty(&mut self) {
1138        if self.anchors_dirty {
1139            self.anchors_dirty = false;
1140            self.rebuild_all_anchor_positions();
1141        }
1142    }
    /// Close the group opened by the matching `start()` call.
    ///
    /// Recomputes the group's length from the final cursor position and
    /// propagates the extent to the parent frame. A group that emitted fewer
    /// slots than before normally KEEPS its larger physical extent (so the
    /// gap slots inside it stay reachable); only a drastic shrink releases it.
    pub fn end(&mut self) {
        if let Some(frame) = self.group_stack.pop() {
            let end = self.cursor;
            if let Some(slot) = self.slots.get_mut(frame.start) {
                debug_assert_eq!(
                    SlotKind::Group,
                    slot.kind(),
                    "slot kind mismatch at {}",
                    frame.start
                );
                if let Slot::Group {
                    key,
                    len,
                    has_gap_children,
                    ..
                } = slot
                {
                    debug_assert_eq!(*key, frame.key, "group key mismatch");
                    // Calculate new length based on cursor position
                    let new_len = end.saturating_sub(frame.start);
                    let old_len = *len;
                    if new_len < old_len {
                        // Fewer slots emitted than last pass: the tail of the
                        // extent now holds (or will hold) gap slots.
                        *has_gap_children = true;
                    }
                    // Shrink heuristic: release the stale extent only when the
                    // group dropped to <= 1/SHRINK_RATIO of its old size AND
                    // lost at least SHRINK_MIN_DROP slots; otherwise keep the
                    // larger extent so interior gaps remain inside the group.
                    const SHRINK_MIN_DROP: usize = 64;
                    const SHRINK_RATIO: usize = 4;
                    if old_len > new_len
                        && old_len >= new_len.saturating_mul(SHRINK_RATIO)
                        && (old_len - new_len) >= SHRINK_MIN_DROP
                    {
                        *len = new_len;
                    } else {
                        *len = old_len.max(new_len);
                    }
                }
            }
            // Grow the parent's frame so it covers this child's extent.
            if let Some(parent) = self.group_stack.last_mut() {
                if parent.end < end {
                    parent.end = end;
                }
            }
        }
    }
1186
1187    fn start_recompose(&mut self, index: usize) {
1188        if let Some(slot) = self.slots.get(index) {
1189            debug_assert_eq!(
1190                SlotKind::Group,
1191                slot.kind(),
1192                "slot kind mismatch at {}",
1193                index
1194            );
1195            if let Slot::Group { key, len, .. } = *slot {
1196                let frame = GroupFrame {
1197                    key,
1198                    start: index,
1199                    end: index + len,
1200                    force_children_recompose: false,
1201                };
1202                self.group_stack.push(frame);
1203                self.cursor = index + 1;
1204                if self.cursor < self.slots.len()
1205                    && matches!(self.slots.get(self.cursor), Some(Slot::Value { .. }))
1206                {
1207                    self.cursor += 1;
1208                }
1209            }
1210        }
1211    }
1212
1213    fn end_recompose(&mut self) {
1214        if let Some(frame) = self.group_stack.pop() {
1215            self.cursor = frame.end;
1216        }
1217    }
1218
1219    pub fn skip_current(&mut self) {
1220        if let Some(frame) = self.group_stack.last() {
1221            self.cursor = frame.end.min(self.slots.len());
1222        }
1223    }
1224
1225    pub fn node_ids_in_current_group(&self) -> Vec<NodeId> {
1226        let Some(frame) = self.group_stack.last() else {
1227            return Vec::new();
1228        };
1229        let end = frame.end.min(self.slots.len());
1230        self.slots[frame.start..end]
1231            .iter()
1232            .filter_map(|slot| match slot {
1233                Slot::Node { id, .. } => Some(*id),
1234                _ => None,
1235            })
1236            .collect()
1237    }
1238
    /// Return the index of a value slot at the cursor, creating or reusing one.
    ///
    /// Resolution at the cursor position, in order:
    /// 1. an existing `Value` slot whose payload is already of type `T` is
    ///    reused as-is (`init` is not invoked);
    /// 2. a `Gap` slot is overwritten in place with a freshly initialized value;
    /// 3. any other slot is replaced through the deferred-drop path;
    /// 4. past the end of the table, a new slot is appended.
    ///
    /// In every case the cursor advances past the slot and the enclosing
    /// group's bounds are updated.
    pub fn use_value_slot<T: 'static>(&mut self, init: impl FnOnce() -> T) -> usize {
        self.ensure_capacity();

        let cursor = self.cursor;
        debug_assert!(
            cursor <= self.slots.len(),
            "slot cursor {} out of bounds",
            cursor
        );

        if cursor < self.slots.len() {
            // Check if we can reuse the existing slot
            let reuse = matches!(
                self.slots.get(cursor),
                Some(Slot::Value { data, .. }) if data.is::<T>()
            );
            if reuse {
                self.cursor = cursor + 1;
                self.update_group_bounds();
                return cursor;
            }

            // Check if the slot is a Gap that we can replace
            if matches!(self.slots.get(cursor), Some(Slot::Gap { .. })) {
                let anchor = self.allocate_anchor();
                let boxed: Box<dyn Any> = Box::new(init());
                self.slots[cursor] = Slot::Value {
                    anchor,
                    data: boxed,
                };
                self.register_anchor(anchor, cursor);
                self.cursor = cursor + 1;
                self.update_group_bounds();
                return cursor;
            }

            // Type mismatch: replace current slot (mark old content as unreachable via gap)
            // We replace in-place to maintain cursor position
            let anchor = self.allocate_anchor();
            let boxed: Box<dyn Any> = Box::new(init());
            self.replace_slot_deferred(
                cursor,
                Slot::Value {
                    anchor,
                    data: boxed,
                },
            );
            self.register_anchor(anchor, cursor);
            self.cursor = cursor + 1;
            self.update_group_bounds();
            return cursor;
        }

        // We're at the end of the slot table, append new slot
        let anchor = self.allocate_anchor();
        let boxed: Box<dyn Any> = Box::new(init());
        let slot = Slot::Value {
            anchor,
            data: boxed,
        };
        self.slots.push(slot);
        self.register_anchor(anchor, cursor);
        self.cursor = cursor + 1;
        self.update_group_bounds();
        cursor
    }
1305
1306    pub fn read_value<T: 'static>(&self, idx: usize) -> &T {
1307        let slot = self
1308            .slots
1309            .get(idx)
1310            .unwrap_or_else(|| panic!("slot index {} out of bounds", idx));
1311        debug_assert_eq!(
1312            SlotKind::Value,
1313            slot.kind(),
1314            "slot kind mismatch at {}",
1315            idx
1316        );
1317        slot.as_value()
1318    }
1319
1320    pub fn read_value_mut<T: 'static>(&mut self, idx: usize) -> &mut T {
1321        let slot = self
1322            .slots
1323            .get_mut(idx)
1324            .unwrap_or_else(|| panic!("slot index {} out of bounds", idx));
1325        debug_assert_eq!(
1326            SlotKind::Value,
1327            slot.kind(),
1328            "slot kind mismatch at {}",
1329            idx
1330        );
1331        slot.as_value_mut()
1332    }
1333
1334    pub fn write_value<T: 'static>(&mut self, idx: usize, value: T) {
1335        if idx >= self.slots.len() {
1336            panic!("attempted to write slot {} out of bounds", idx);
1337        }
1338        let slot = &mut self.slots[idx];
1339        debug_assert_eq!(
1340            SlotKind::Value,
1341            slot.kind(),
1342            "slot kind mismatch at {}",
1343            idx
1344        );
1345        // Preserve the anchor when replacing the value
1346        let anchor = slot.anchor_id();
1347        *slot = Slot::Value {
1348            anchor,
1349            data: Box::new(value),
1350        };
1351    }
1352
1353    /// Read a value slot by its anchor ID.
1354    /// Provides stable access even if the slot's position changes.
1355    pub fn read_value_by_anchor<T: 'static>(&self, anchor: AnchorId) -> Option<&T> {
1356        let idx = self.resolve_anchor(anchor)?;
1357        Some(self.read_value(idx))
1358    }
1359
1360    /// Read a mutable value slot by its anchor ID.
1361    pub fn read_value_mut_by_anchor<T: 'static>(&mut self, anchor: AnchorId) -> Option<&mut T> {
1362        let idx = self.resolve_anchor(anchor)?;
1363        Some(self.read_value_mut(idx))
1364    }
1365
1366    pub fn remember<T: 'static>(&mut self, init: impl FnOnce() -> T) -> Owned<T> {
1367        let index = self.use_value_slot(|| Owned::new(init()));
1368        self.read_value::<Owned<T>>(index).clone()
1369    }
1370
1371    /// Remember a value and return both its index and anchor ID.
1372    /// The anchor provides stable access even if the slot's position changes.
1373    pub fn remember_with_anchor<T: 'static>(
1374        &mut self,
1375        init: impl FnOnce() -> T,
1376    ) -> (usize, AnchorId) {
1377        let index = self.use_value_slot(|| Owned::new(init()));
1378        let anchor = self
1379            .slots
1380            .get(index)
1381            .map(|slot| slot.anchor_id())
1382            .unwrap_or(AnchorId::INVALID);
1383        (index, anchor)
1384    }
1385
1386    pub fn record_node(&mut self, id: NodeId) {
1387        self.ensure_capacity();
1388
1389        let cursor = self.cursor;
1390        debug_assert!(
1391            cursor <= self.slots.len(),
1392            "slot cursor {} out of bounds",
1393            cursor
1394        );
1395        if cursor < self.slots.len() {
1396            // Check if we can reuse the existing node slot
1397            if let Some(Slot::Node { id: existing, .. }) = self.slots.get(cursor) {
1398                if *existing == id {
1399                    self.cursor = cursor + 1;
1400                    self.update_group_bounds();
1401                    return;
1402                }
1403            }
1404
1405            // Check if the slot is a Gap that we can replace
1406            if matches!(self.slots.get(cursor), Some(Slot::Gap { .. })) {
1407                let anchor = self.allocate_anchor();
1408                self.slots[cursor] = Slot::Node { anchor, id };
1409                self.register_anchor(anchor, cursor);
1410                self.cursor = cursor + 1;
1411                self.update_group_bounds();
1412                return;
1413            }
1414
1415            // Type mismatch: Replace the slot directly with the new node
1416            // For nodes, we can't use gaps because nodes exist in the applier
1417            // The old node becomes orphaned and will be garbage collected later
1418            let anchor = self.allocate_anchor();
1419            self.replace_slot_deferred(cursor, Slot::Node { anchor, id });
1420            self.register_anchor(anchor, cursor);
1421            self.cursor = cursor + 1;
1422            self.update_group_bounds();
1423            return;
1424        }
1425
1426        // No existing slot at cursor: add new slot
1427        let anchor = self.allocate_anchor();
1428        let slot = Slot::Node { anchor, id };
1429        self.slots.push(slot);
1430        self.register_anchor(anchor, cursor);
1431        self.cursor = cursor + 1;
1432        self.update_group_bounds();
1433    }
1434
1435    pub fn peek_node(&self) -> Option<NodeId> {
1436        let cursor = self.cursor;
1437        debug_assert!(
1438            cursor <= self.slots.len(),
1439            "slot cursor {} out of bounds",
1440            cursor
1441        );
1442        match self.slots.get(cursor) {
1443            Some(Slot::Node { id, .. }) => Some(*id),
1444            Some(_slot) => None,
1445            None => None,
1446        }
1447    }
1448
1449    pub fn read_node(&mut self) -> Option<NodeId> {
1450        let cursor = self.cursor;
1451        debug_assert!(
1452            cursor <= self.slots.len(),
1453            "slot cursor {} out of bounds",
1454            cursor
1455        );
1456        let node = match self.slots.get(cursor) {
1457            Some(Slot::Node { id, .. }) => Some(*id),
1458            Some(_slot) => None,
1459            None => None,
1460        };
1461        if node.is_some() {
1462            self.cursor = cursor + 1;
1463            self.update_group_bounds();
1464        }
1465        node
1466    }
1467
    /// Advance the cursor one slot after a node was read externally,
    /// keeping the enclosing group's bounds in sync.
    pub fn advance_after_node_read(&mut self) {
        self.cursor += 1;
        self.update_group_bounds();
    }
1472
1473    pub fn reset(&mut self) {
1474        self.cursor = 0;
1475        self.group_stack.clear();
1476    }
1477
    /// Step the cursor back by one position.
    /// Used when we need to replace a slot that was just read but turned out to be incompatible.
    ///
    /// Debug builds assert the cursor is non-zero; release builds saturate at 0.
    pub fn step_back(&mut self) {
        debug_assert!(self.cursor > 0, "Cannot step back from cursor 0");
        self.cursor = self.cursor.saturating_sub(1);
    }
1484
1485    /// Trim slots by marking unreachable slots as gaps.
1486    ///
1487    /// Instead of blindly truncating at cursor position, this method:
1488    /// 1. Marks slots from cursor to end of current group as gaps
1489    /// 2. Keeps the group length unchanged (gaps are part of the group's physical extent)
1490    /// 3. Preserves sibling components outside the current group
1491    ///
1492    /// This ensures effect states (LaunchedEffect, etc.) are preserved even when
1493    /// conditional rendering changes the composition structure.
1494    ///
1495    /// Key insight: Gap slots remain part of the group's physical length. The group's
1496    /// `len` field represents its physical extent in the slots array, not the count of
1497    /// active slots. This allows gap slots to be found and reused in subsequent compositions.
1498    pub fn trim_to_cursor(&mut self) -> bool {
1499        let mut marked = false;
1500        if let Some((owner_start, group_end)) = self
1501            .group_stack
1502            .last()
1503            .map(|frame| (frame.start, frame.end.min(self.slots.len())))
1504        {
1505            // Mark unreachable slots within this group as gaps
1506            if self.cursor < group_end
1507                && self.mark_range_as_gaps(self.cursor, group_end, Some(owner_start))
1508            {
1509                marked = true;
1510            }
1511
1512            // Update the frame end to current cursor
1513            // NOTE: We do NOT update the group's len field, because gap slots
1514            // are still part of the group's physical extent in the slots array.
1515            // The group len should remain unchanged so that traversal can find the gaps.
1516            if let Some(frame) = self.group_stack.last_mut() {
1517                frame.end = self.cursor;
1518            }
1519        } else if self.cursor < self.slots.len() {
1520            // If there's no group stack, we're at the root level
1521            // Mark everything beyond cursor as gaps
1522            if self.mark_range_as_gaps(self.cursor, self.slots.len(), None) {
1523                marked = true;
1524            }
1525        }
1526        self.flush_pending_slot_drops();
1527        marked
1528    }
1529}
1530
1531// ═══════════════════════════════════════════════════════════════════════════
1532// SlotStorage implementation for SlotTable
1533// ═══════════════════════════════════════════════════════════════════════════
1534
/// Baseline `SlotStorage` implementation using a gap-buffer strategy.
///
/// This is the reference / most-feature-complete backend, supporting:
/// - Gap-based slot reuse (preserving sibling state during conditional rendering)
/// - Anchor-based positional stability during group moves and insertions
/// - Efficient group skipping and recomposition via scope-based entry
/// - Batch anchor rebuilding for large structural changes
///
/// **Implementation Strategy:**
/// Every trait method delegates to the matching inherent method using fully
/// qualified calls (`SlotTable::method(self, ...)`). This makes the dispatch
/// target explicit and rules out accidental recursion where a trait method
/// and an inherent method share a name (e.g. `read_value`, `record_node`).
impl SlotStorage for SlotTable {
    type Group = GroupId;
    type ValueSlot = ValueSlotId;

    fn begin_group(&mut self, key: Key) -> StartGroup<Self::Group> {
        let idx = SlotTable::start(self, key);
        // `take_last_start_was_gap` reports whether start() restored the
        // group from a gap (consuming the flag).
        let restored = SlotTable::take_last_start_was_gap(self);
        StartGroup {
            group: GroupId(idx),
            restored_from_gap: restored,
        }
    }

    fn set_group_scope(&mut self, group: Self::Group, scope: ScopeId) {
        SlotTable::set_group_scope(self, group.0, scope);
    }

    fn end_group(&mut self) {
        SlotTable::end(self);
    }

    fn skip_current_group(&mut self) {
        SlotTable::skip_current(self);
    }

    fn nodes_in_current_group(&self) -> Vec<NodeId> {
        SlotTable::node_ids_in_current_group(self)
    }

    fn begin_recranpose_at_scope(&mut self, scope: ScopeId) -> Option<Self::Group> {
        SlotTable::start_recranpose_at_scope(self, scope).map(GroupId)
    }

    fn end_recompose(&mut self) {
        SlotTable::end_recompose(self);
    }

    fn alloc_value_slot<T: 'static>(&mut self, init: impl FnOnce() -> T) -> Self::ValueSlot {
        let idx = SlotTable::use_value_slot(self, init);
        ValueSlotId(idx)
    }

    fn read_value<T: 'static>(&self, slot: Self::ValueSlot) -> &T {
        SlotTable::read_value(self, slot.0)
    }

    fn read_value_mut<T: 'static>(&mut self, slot: Self::ValueSlot) -> &mut T {
        SlotTable::read_value_mut(self, slot.0)
    }

    fn write_value<T: 'static>(&mut self, slot: Self::ValueSlot, value: T) {
        SlotTable::write_value(self, slot.0, value);
    }

    fn remember<T: 'static>(&mut self, init: impl FnOnce() -> T) -> Owned<T> {
        SlotTable::remember(self, init)
    }

    fn peek_node(&self) -> Option<NodeId> {
        SlotTable::peek_node(self)
    }

    fn record_node(&mut self, id: NodeId) {
        SlotTable::record_node(self, id);
    }

    fn advance_after_node_read(&mut self) {
        SlotTable::advance_after_node_read(self);
    }

    fn step_back(&mut self) {
        SlotTable::step_back(self);
    }

    fn finalize_current_group(&mut self) -> bool {
        SlotTable::trim_to_cursor(self)
    }

    fn reset(&mut self) {
        SlotTable::reset(self);
    }

    fn flush(&mut self) {
        SlotTable::flush_anchors_if_dirty(self);
    }
}
1633
impl Drop for SlotTable {
    fn drop(&mut self) {
        // Flush queued deferred slot drops first, then tear down the
        // remaining slots in reverse order.
        self.flush_pending_slot_drops();
        drop_slots_in_reverse(&mut self.slots);
    }
}