// cranpose_core/chunked_slot_storage.rs
1//! Chunked slot storage backend that avoids large rotate operations.
2//!
3//! This backend divides the slot array into fixed-size chunks (256 slots each),
4//! allowing insertions and deletions to only shift slots within or between
//! adjacent chunks rather than rotating the entire storage. This improves
//! performance for large compositions with frequent insertions.

// Complex slot state machine logic benefits from explicit nested pattern matching for clarity
#![allow(clippy::collapsible_match)]
10//!
11//! ## Implementation Details
12//!
13//! - **Chunk size**: Fixed at 256 slots per chunk for optimal balance between
14//!   overhead and shift performance.
15//! - **Insertion strategy**: When inserting at a non-gap position, finds the
16//!   nearest gap and shifts slots to make room. Falls back to overwrite if no
17//!   gap is found nearby.
18//! - **Gap restoration**: When beginning a group at a gap slot with matching key,
19//!   restores the group with its preserved length and scope, setting
20//!   `force_children_recompose = true`.
21//! - **Anchor lifecycle**:
22//!   1. Created with `alloc_anchor()` when a slot is allocated
23//!   2. Marked dirty via `anchors_dirty = true` when slots are shifted
24//!   3. Rebuilt via `rebuild_anchors()` during `flush()`
25//!
26//! ## Trade-offs
27//!
28//! - **Pros**: Better insertion performance for large compositions
29//! - **Cons**: Higher memory overhead (fixed chunk sizes), more complex indexing
30
31use crate::{
32    slot_storage::{GroupId, SlotStorage, StartGroup, ValueSlotId},
33    AnchorId, Key, NodeId, Owned, ScopeId,
34};
35use std::any::Any;
36use std::cell::Cell;
37
/// Size of each chunk in slots. Tuned for balance between chunk overhead
/// and shift performance.
///
/// Every chunk is allocated at exactly this size (see `ensure_capacity` and
/// `shift_across_chunks`); chunks are never truncated.
const CHUNK_SIZE: usize = 256;
41
/// Chunked slot storage implementation.
///
/// Uses a Vec of fixed-size chunks to store slots, avoiding the O(n) rotate
/// operations needed when inserting near the start of a large flat Vec.
#[derive(Default)]
pub struct ChunkedSlotStorage {
    /// Storage chunks, each up to CHUNK_SIZE slots.
    chunks: Vec<Vec<ChunkedSlot>>,
    /// Global cursor position (linear index across all chunks).
    cursor: usize,
    /// Group stack tracking current composition nesting.
    group_stack: Vec<GroupFrame>,
    /// Anchor ID → global slot position mapping.
    /// `usize::MAX` marks an anchor with no known position (see `rebuild_anchors`).
    anchors: Vec<usize>,
    /// Whether anchors need rebuilding.
    anchors_dirty: bool,
    /// Counter for allocating unique anchor IDs.
    /// Held in a `Cell` so `alloc_anchor` can mint IDs through `&self`.
    next_anchor_id: Cell<usize>,
    /// Tracks whether the most recent begin_group reused a gap.
    last_start_was_gap: bool,
}
63
/// Bookkeeping for one open group on the composition stack.
struct GroupFrame {
    /// Key the group was started with.
    #[allow(dead_code)] // Tracked for debugging/future inspection tools
    key: Key,
    /// Global index of the group's own slot; children begin at `start + 1`.
    start: usize,
    /// Exclusive end of the group's slot range; grown as content is allocated.
    end: usize,
    /// True when the group was restored from a gap, so its children must be
    /// recomposed rather than skipped.
    #[allow(dead_code)] // Tracked for debugging/future recomposition heuristics
    force_children_recompose: bool,
}
72
/// A single slot in the chunked storage.
enum ChunkedSlot {
    /// A group header; the `len` slots following it are its children.
    Group {
        key: Key,
        anchor: AnchorId,
        len: usize,
        scope: Option<ScopeId>,
        /// Set when this group's children were converted to gaps during
        /// finalization; cleared (and recomposition forced) on reuse.
        has_gap_children: bool,
    },
    /// A remembered value, stored type-erased behind `dyn Any`.
    Value {
        anchor: AnchorId,
        data: Box<dyn Any>,
    },
    /// A recorded node reference.
    Node {
        anchor: AnchorId,
        id: NodeId,
    },
    /// An unused slot. When it replaced a finalized group, the `group_*`
    /// fields preserve that group's identity so `start_group` can restore it.
    Gap {
        anchor: AnchorId,
        group_key: Option<Key>,
        group_scope: Option<ScopeId>,
        group_len: usize,
    },
}
96
97impl ChunkedSlot {
98    fn anchor_id(&self) -> AnchorId {
99        match self {
100            ChunkedSlot::Group { anchor, .. } => *anchor,
101            ChunkedSlot::Value { anchor, .. } => *anchor,
102            ChunkedSlot::Node { anchor, .. } => *anchor,
103            ChunkedSlot::Gap { anchor, .. } => *anchor,
104        }
105    }
106
107    fn as_value<T: 'static>(&self) -> &T {
108        match self {
109            ChunkedSlot::Value { data, .. } => data
110                .downcast_ref::<T>()
111                .expect("slot value type mismatch: requested type does not match stored type"),
112            _ => panic!(
113                "slot is not a value: expected Value variant, got {:?}",
114                std::mem::discriminant(self)
115            ),
116        }
117    }
118
119    fn as_value_mut<T: 'static>(&mut self) -> &mut T {
120        match self {
121            ChunkedSlot::Value { data, .. } => data
122                .downcast_mut::<T>()
123                .expect("slot value type mismatch: requested type does not match stored type"),
124            _ => panic!("slot is not a value: expected Value variant"),
125        }
126    }
127}
128
129impl Default for ChunkedSlot {
130    fn default() -> Self {
131        ChunkedSlot::Gap {
132            anchor: AnchorId::INVALID,
133            group_key: None,
134            group_scope: None,
135            group_len: 0,
136        }
137    }
138}
139
impl ChunkedSlotStorage {
    /// Create an empty storage.
    ///
    /// Anchor IDs start at 1 because ID 0 is reserved for `AnchorId::INVALID`.
    pub fn new() -> Self {
        Self {
            next_anchor_id: Cell::new(1), // Start at 1 (0 is INVALID)
            ..Default::default()
        }
    }

    /// Get total number of slots across all chunks.
    ///
    /// O(number of chunks); every chunk is created full at `CHUNK_SIZE`.
    fn total_slots(&self) -> usize {
        self.chunks.iter().map(|c| c.len()).sum()
    }

    /// Convert global index to (chunk_index, offset).
    ///
    /// Walks chunks linearly, so it would tolerate variable-sized chunks even
    /// though every chunk is currently allocated at exactly `CHUNK_SIZE`.
    fn global_to_chunk(&self, global: usize) -> (usize, usize) {
        let mut remaining = global;
        for (chunk_idx, chunk) in self.chunks.iter().enumerate() {
            if remaining < chunk.len() {
                return (chunk_idx, remaining);
            }
            remaining -= chunk.len();
        }
        // Past the end
        (self.chunks.len(), 0)
    }

    /// Get a reference to the slot at global index.
    /// Returns `None` when the index is past the end of all chunks.
    fn get_slot(&self, global: usize) -> Option<&ChunkedSlot> {
        let (chunk_idx, offset) = self.global_to_chunk(global);
        self.chunks.get(chunk_idx)?.get(offset)
    }

    /// Get a mutable reference to the slot at global index.
    fn get_slot_mut(&mut self, global: usize) -> Option<&mut ChunkedSlot> {
        let (chunk_idx, offset) = self.global_to_chunk(global);
        self.chunks.get_mut(chunk_idx)?.get_mut(offset)
    }

    /// Allocate a new anchor ID.
    ///
    /// Takes `&self` thanks to the `Cell` counter, so call sites that only
    /// hold a shared borrow can still mint IDs.
    fn alloc_anchor(&self) -> AnchorId {
        let id = self.next_anchor_id.get();
        self.next_anchor_id.set(id + 1);
        AnchorId::new(id)
    }

    /// Ensure capacity at cursor by adding gap slots if needed.
    ///
    /// NOTE(review): this grows by at most one chunk per call; if the cursor
    /// were ever more than `CHUNK_SIZE` past the end it would still be out of
    /// bounds afterwards — confirm callers never advance the cursor that far.
    fn ensure_capacity(&mut self) {
        if self.chunks.is_empty() {
            // Initialize first chunk
            let mut chunk = Vec::with_capacity(CHUNK_SIZE);
            chunk.resize_with(CHUNK_SIZE, ChunkedSlot::default);
            self.chunks.push(chunk);
        }

        let total = self.total_slots();
        if self.cursor >= total {
            // Need more chunks
            let mut chunk = Vec::with_capacity(CHUNK_SIZE);
            chunk.resize_with(CHUNK_SIZE, ChunkedSlot::default);
            self.chunks.push(chunk);
        }
    }

    /// Update all group frame bounds to include the current cursor position.
    /// This ensures frames grow as we allocate more content than initially expected.
    fn update_group_bounds(&mut self) {
        for frame in &mut self.group_stack {
            if frame.end < self.cursor {
                frame.end = self.cursor;
            }
        }
    }

    /// Insert a slot at the cursor position, shifting within/between chunks.
    /// This implements proper insertion semantics: if the target is a gap, reuse it;
    /// otherwise, shift slots to make room.
    fn insert_at_cursor(&mut self, slot: ChunkedSlot) {
        self.ensure_capacity();

        // Fast path: if current slot is a gap (any gap, regardless of anchor), reuse it
        if let Some(existing) = self.get_slot(self.cursor) {
            if matches!(existing, ChunkedSlot::Gap { .. }) {
                *self
                    .get_slot_mut(self.cursor)
                    .expect("insert_at_cursor: slot must exist after ensure_capacity") = slot;
                // Update group end to account for this slot
                if let Some(frame) = self.group_stack.last_mut() {
                    if self.cursor >= frame.end {
                        frame.end = self.cursor + 1;
                    }
                }
                // Overwriting a gap may replace its anchor, so positions must
                // be rebuilt on the next flush().
                self.anchors_dirty = true;
                return;
            }
        }

        // Need to insert: find a gap to the right and shift slots
        if let Some(gap_pos) = self.find_gap_near_cursor() {
            // Shift slots from cursor to gap_pos-1 to the right by 1
            // Use shift_across_chunks for all cases (handles both same-chunk and cross-chunk)
            self.shift_across_chunks(self.cursor, gap_pos);

            // Now insert at cursor
            *self
                .get_slot_mut(self.cursor)
                .expect("insert_at_cursor: slot must exist after shift") = slot;

            // Update all group frames that overlap with the shifted region.
            // This assumes frames are in creation order and fully cover their ranges.
            // Frames starting at or after cursor: shift both start and end
            // Frames containing cursor: extend end only
            for frame in &mut self.group_stack {
                if frame.start >= self.cursor {
                    frame.start += 1;
                    frame.end += 1;
                } else if frame.end > self.cursor {
                    frame.end += 1;
                }
            }

            self.anchors_dirty = true;
        } else {
            // No gap nearby: just overwrite (fallback behavior)
            // NOTE(review): if the cursor is past the end here the slot is
            // silently dropped — confirm ensure_capacity() makes that unreachable.
            if self.cursor < self.total_slots() {
                *self
                    .get_slot_mut(self.cursor)
                    .expect("insert_at_cursor: slot must exist for overwrite") = slot;
                if let Some(frame) = self.group_stack.last_mut() {
                    if self.cursor >= frame.end {
                        frame.end = self.cursor + 1;
                    }
                }
                self.anchors_dirty = true;
            }
        }
    }

    /// Shift slots across chunks from start to end (exclusive).
    /// Ensures capacity exists for all destination positions before shifting.
    ///
    /// The gap previously at `end` is consumed by the shift; position `start`
    /// is left holding a default (anchor-less) gap from `mem::take`, which the
    /// caller immediately overwrites.
    fn shift_across_chunks(&mut self, start: usize, end: usize) {
        // Ensure we have capacity for the rightmost destination (end position)
        while self.total_slots() <= end {
            let mut chunk = Vec::with_capacity(CHUNK_SIZE);
            chunk.resize_with(CHUNK_SIZE, ChunkedSlot::default);
            self.chunks.push(chunk);
        }

        // Move slots one by one from end-1 down to start using mem::replace
        for i in (start..end).rev() {
            let (src_chunk, src_offset) = self.global_to_chunk(i);
            let (dst_chunk, dst_offset) = self.global_to_chunk(i + 1);

            // Use mem::replace to move without cloning
            let temp = std::mem::take(&mut self.chunks[src_chunk][src_offset]);
            // Capacity is guaranteed by the loop above
            self.chunks[dst_chunk][dst_offset] = temp;
        }
    }

    /// Rebuild anchor positions by scanning all slots.
    ///
    /// No-op unless `anchors_dirty` is set. Invalid anchors (ID 0) are never
    /// recorded; entries for anchors no longer present stay at `usize::MAX`.
    fn rebuild_anchors(&mut self) {
        if !self.anchors_dirty {
            return;
        }

        // Clear existing anchor map
        for pos in self.anchors.iter_mut() {
            *pos = usize::MAX;
        }

        // Scan all slots and update anchor positions
        let mut global_idx = 0;
        for chunk in &self.chunks {
            for slot in chunk {
                let anchor = slot.anchor_id();
                if anchor.is_valid() {
                    let id = anchor.0;
                    if id >= self.anchors.len() {
                        self.anchors.resize(id + 1, usize::MAX);
                    }
                    self.anchors[id] = global_idx;
                }
                global_idx += 1;
            }
        }

        self.anchors_dirty = false;
    }

    /// Lookup the current position of an anchor ID.
    /// Returns None if the anchor is not found or invalid.
    ///
    /// NOTE(review): does not call rebuild_anchors(), so the result may be
    /// stale while `anchors_dirty` is set — confirm callers flush() first.
    #[allow(dead_code)]
    fn lookup_anchor_position(&self, anchor_id: usize) -> Option<usize> {
        if anchor_id < self.anchors.len() {
            let pos = self.anchors[anchor_id];
            if pos != usize::MAX {
                return Some(pos);
            }
        }
        None
    }

    /// Find a gap slot near the cursor.
    /// Accepts any gap slot (structural or finalized) for reuse.
    ///
    /// Scans forward only, up to SCAN_LIMIT slots; returns None when no gap
    /// is found within that window.
    fn find_gap_near_cursor(&self) -> Option<usize> {
        // Look forward from cursor
        const SCAN_LIMIT: usize = 128;
        for offset in 0..SCAN_LIMIT {
            let pos = self.cursor + offset;
            if let Some(slot) = self.get_slot(pos) {
                // Accept any gap for reuse (regardless of anchor value)
                if matches!(slot, ChunkedSlot::Gap { .. }) {
                    return Some(pos);
                }
            }
        }
        None
    }

    /// Start a new group at the cursor.
    ///
    /// Returns `(start_index, restored_from_gap)`. Tries, in order:
    /// 1. reuse an existing group slot with a matching key,
    /// 2. restore a gap that preserved a matching group's identity,
    /// 3. create a brand-new group slot.
    fn start_group(&mut self, key: Key) -> (usize, bool) {
        self.ensure_capacity();

        // Check if current slot is already a group with matching key - reuse it
        if let Some(slot) = self.get_slot(self.cursor) {
            if let ChunkedSlot::Group {
                key: existing_key,
                len,
                has_gap_children,
                ..
            } = slot
            {
                if *existing_key == key {
                    // Reuse existing group
                    let group_len = *len;
                    let had_gap_children = *has_gap_children;

                    // Clear gap children flag if present
                    if had_gap_children {
                        if let Some(ChunkedSlot::Group {
                            has_gap_children: ref mut flag,
                            ..
                        }) = self.get_slot_mut(self.cursor)
                        {
                            *flag = false;
                        }
                    }

                    let start = self.cursor;
                    self.cursor += 1;
                    self.group_stack.push(GroupFrame {
                        key,
                        start,
                        // +1 accounts for the group slot itself.
                        end: start + group_len + 1,
                        force_children_recompose: had_gap_children,
                    });
                    self.update_group_bounds();
                    self.last_start_was_gap = false;
                    return (start, false);
                }
            }
        }

        // Check if current slot is a gap group we can restore
        if let Some(slot) = self.get_slot(self.cursor) {
            if let ChunkedSlot::Gap {
                group_key: Some(gap_key),
                group_scope,
                group_len,
                anchor: gap_anchor,
            } = slot
            {
                if *gap_key == key {
                    // Restore the gap group, reusing the old anchor or creating new one
                    let anchor = if gap_anchor.is_valid() {
                        *gap_anchor
                    } else {
                        self.alloc_anchor()
                    };
                    let scope = *group_scope;
                    let len = *group_len;
                    *self
                        .get_slot_mut(self.cursor)
                        .expect("start_group: slot must exist for gap restoration") =
                        ChunkedSlot::Group {
                            key,
                            anchor,
                            len,
                            scope,
                            has_gap_children: true,
                        };

                    let start = self.cursor;
                    self.cursor += 1;
                    // Set frame.end to start + len + 1 to account for the group slot itself
                    // The group slot at `start` contains children from start+1 to start+len
                    self.group_stack.push(GroupFrame {
                        key,
                        start,
                        end: start + len + 1,
                        force_children_recompose: true,
                    });
                    self.update_group_bounds();
                    self.last_start_was_gap = true;
                    self.anchors_dirty = true;
                    return (start, true);
                }
            }
        }

        // Create new group
        let anchor = self.alloc_anchor();
        let slot = ChunkedSlot::Group {
            key,
            anchor,
            len: 0,
            scope: None,
            has_gap_children: false,
        };

        self.insert_at_cursor(slot);
        let start = self.cursor;
        self.cursor += 1;
        self.group_stack.push(GroupFrame {
            key,
            start,
            end: start,
            force_children_recompose: false,
        });
        self.update_group_bounds();
        self.last_start_was_gap = false;
        (start, false)
    }

    /// End the current group (internal implementation).
    ///
    /// Pops the innermost frame and writes the final child count back into
    /// the group's slot: the distance from the slot after the header to the
    /// cursor. No-op when the group stack is empty.
    fn do_end_group(&mut self) {
        if let Some(frame) = self.group_stack.pop() {
            let len = self.cursor.saturating_sub(frame.start + 1);
            if let Some(slot) = self.get_slot_mut(frame.start) {
                if let ChunkedSlot::Group { len: slot_len, .. } = slot {
                    *slot_len = len;
                }
            }
        }
    }

    /// Skip over the current group (internal implementation).
    ///
    /// Advances the cursor past the group header and all `len` children.
    /// No-op when the cursor is not positioned on a group slot.
    fn do_skip_current_group(&mut self) {
        if let Some(slot) = self.get_slot(self.cursor) {
            if let ChunkedSlot::Group { len, .. } = slot {
                self.cursor += 1 + len;
            }
        }
    }

    /// Finalize current group by marking unreached tail as gaps (internal implementation).
    ///
    /// Returns true when at least one slot was converted to a gap. Group
    /// slots keep their key/scope/len inside the gap so they can later be
    /// restored by `start_group`.
    fn do_finalize_current_group(&mut self) -> bool {
        let frame_end = match self.group_stack.last() {
            Some(frame) => frame.end,
            None => {
                // Root-level finalization: mark everything from cursor to end as gaps
                let total = self.total_slots();
                if self.cursor >= total {
                    return false;
                }
                let mut marked = false;
                while self.cursor < total {
                    if let Some(slot) = self.get_slot_mut(self.cursor) {
                        let anchor = slot.anchor_id();
                        let (group_key, group_scope, group_len) = match slot {
                            ChunkedSlot::Group {
                                key, scope, len, ..
                            } => (Some(*key), *scope, *len),
                            _ => (None, None, 0),
                        };
                        *slot = ChunkedSlot::Gap {
                            anchor,
                            group_key,
                            group_scope,
                            group_len,
                        };
                        marked = true;
                    }
                    self.cursor += 1;
                }
                // Mark anchors dirty so flush() rebuilds the anchor map
                self.anchors_dirty = true;
                return marked;
            }
        };

        let mut marked = false;
        while self.cursor < frame_end {
            if let Some(slot) = self.get_slot_mut(self.cursor) {
                // Convert to gap
                let anchor = slot.anchor_id();
                let (group_key, group_scope, group_len) = match slot {
                    ChunkedSlot::Group {
                        key, scope, len, ..
                    } => (Some(*key), *scope, *len),
                    _ => (None, None, 0),
                };
                *slot = ChunkedSlot::Gap {
                    anchor,
                    group_key,
                    group_scope,
                    group_len,
                };
                marked = true;
            }
            self.cursor += 1;
        }

        if let Some(frame) = self.group_stack.last_mut() {
            frame.end = self.cursor;
        }
        marked
    }
}
559
560impl SlotStorage for ChunkedSlotStorage {
561    type Group = GroupId;
562    type ValueSlot = ValueSlotId;
563
564    fn begin_group(&mut self, key: Key) -> StartGroup<Self::Group> {
565        let (idx, restored) = self.start_group(key);
566        StartGroup {
567            group: GroupId::new(idx),
568            restored_from_gap: restored,
569        }
570    }
571
572    fn set_group_scope(&mut self, group: Self::Group, scope: ScopeId) {
573        if let Some(slot) = self.get_slot_mut(group.index()) {
574            if let ChunkedSlot::Group {
575                scope: slot_scope, ..
576            } = slot
577            {
578                *slot_scope = Some(scope);
579            }
580        }
581    }
582
583    fn end_group(&mut self) {
584        self.do_end_group();
585    }
586
587    fn skip_current_group(&mut self) {
588        self.do_skip_current_group();
589    }
590
591    fn nodes_in_current_group(&self) -> Vec<NodeId> {
592        // Scan current group for nodes
593        let mut nodes = Vec::new();
594        if let Some(frame) = self.group_stack.last() {
595            for pos in (frame.start + 1)..frame.end {
596                if let Some(ChunkedSlot::Node { id, .. }) = self.get_slot(pos) {
597                    nodes.push(*id);
598                }
599            }
600        }
601        nodes
602    }
603
604    fn begin_recranpose_at_scope(&mut self, scope: ScopeId) -> Option<Self::Group> {
605        // Linear scan to find group with this scope
606        for global_idx in 0..self.total_slots() {
607            if let Some(ChunkedSlot::Group { scope: Some(s), .. }) = self.get_slot(global_idx) {
608                if *s == scope {
609                    self.cursor = global_idx;
610                    return Some(GroupId::new(global_idx));
611                }
612            }
613        }
614        None
615    }
616
617    fn end_recompose(&mut self) {
618        // No-op for chunked storage
619    }
620
621    fn alloc_value_slot<T: 'static>(&mut self, init: impl FnOnce() -> T) -> Self::ValueSlot {
622        self.ensure_capacity();
623
624        // Check if current slot is a reusable value slot
625        if let Some(ChunkedSlot::Value { data, .. }) = self.get_slot(self.cursor) {
626            if data.is::<T>() {
627                let slot_id = ValueSlotId::new(self.cursor);
628                self.cursor += 1;
629                return slot_id;
630            }
631        }
632
633        // Create new value slot
634        let anchor = self.alloc_anchor();
635        let slot = ChunkedSlot::Value {
636            anchor,
637            data: Box::new(init()),
638        };
639        self.insert_at_cursor(slot);
640        let slot_id = ValueSlotId::new(self.cursor);
641        self.cursor += 1;
642        self.update_group_bounds();
643        slot_id
644    }
645
646    fn read_value<T: 'static>(&self, slot: Self::ValueSlot) -> &T {
647        self.get_slot(slot.index())
648            .expect("value slot not found")
649            .as_value()
650    }
651
652    fn read_value_mut<T: 'static>(&mut self, slot: Self::ValueSlot) -> &mut T {
653        self.get_slot_mut(slot.index())
654            .expect("value slot not found")
655            .as_value_mut()
656    }
657
658    fn write_value<T: 'static>(&mut self, slot: Self::ValueSlot, value: T) {
659        if let Some(slot_mut) = self.get_slot_mut(slot.index()) {
660            if let ChunkedSlot::Value { data, .. } = slot_mut {
661                *data = Box::new(value);
662            }
663        }
664    }
665
666    fn remember<T: 'static>(&mut self, init: impl FnOnce() -> T) -> Owned<T> {
667        let slot = self.alloc_value_slot(|| Owned::new(init()));
668        self.read_value::<Owned<T>>(slot).clone()
669    }
670
671    fn peek_node(&self) -> Option<NodeId> {
672        if let Some(ChunkedSlot::Node { id, .. }) = self.get_slot(self.cursor) {
673            Some(*id)
674        } else {
675            None
676        }
677    }
678
679    fn record_node(&mut self, id: NodeId) {
680        self.ensure_capacity();
681        let anchor = self.alloc_anchor();
682        let slot = ChunkedSlot::Node { anchor, id };
683        self.insert_at_cursor(slot);
684        self.cursor += 1;
685    }
686
687    fn advance_after_node_read(&mut self) {
688        self.cursor += 1;
689    }
690
691    fn step_back(&mut self) {
692        self.cursor = self.cursor.saturating_sub(1);
693    }
694
695    fn finalize_current_group(&mut self) -> bool {
696        self.do_finalize_current_group()
697    }
698
699    fn reset(&mut self) {
700        self.cursor = 0;
701        self.group_stack.clear();
702    }
703
704    fn flush(&mut self) {
705        self.rebuild_anchors();
706    }
707}
708
709impl ChunkedSlotStorage {
710    /// Debug method to dump all groups.
711    pub fn debug_dump_groups(&self) -> Vec<(usize, Key, Option<ScopeId>, usize)> {
712        let mut result = Vec::new();
713        for global_idx in 0..self.total_slots() {
714            if let Some(ChunkedSlot::Group {
715                key, len, scope, ..
716            }) = self.get_slot(global_idx)
717            {
718                result.push((global_idx, *key, *scope, *len));
719            }
720        }
721        result
722    }
723
724    /// Debug method to dump all slots.
725    pub fn debug_dump_all_slots(&self) -> Vec<(usize, String)> {
726        let mut result = Vec::new();
727        for global_idx in 0..self.total_slots() {
728            let desc = match self.get_slot(global_idx) {
729                Some(ChunkedSlot::Group {
730                    key,
731                    scope,
732                    len,
733                    has_gap_children,
734                    ..
735                }) => {
736                    format!(
737                        "Group(key={}, scope={:?}, len={}, gaps={})",
738                        key, scope, len, has_gap_children
739                    )
740                }
741                Some(ChunkedSlot::Value { .. }) => "Value".to_string(),
742                Some(ChunkedSlot::Node { id, .. }) => format!("Node(id={})", id),
743                Some(ChunkedSlot::Gap {
744                    group_key,
745                    group_scope,
746                    group_len,
747                    ..
748                }) => {
749                    format!(
750                        "Gap(key={:?}, scope={:?}, len={})",
751                        group_key, group_scope, group_len
752                    )
753                }
754                None => "Empty".to_string(),
755            };
756            result.push((global_idx, desc));
757        }
758        result
759    }
760}