cranpose_foundation/lazy/lazy_list_scope.rs
1//! DSL scope for building lazy list content.
2//!
3//! Provides [`LazyListScope`] trait and implementation for the ergonomic
4//! `item {}` / `items {}` API used in `LazyColumn` and `LazyRow`.
5//!
6//! Based on JC's `LazyLayoutIntervalContent` pattern.
7
8use std::cell::RefCell;
9use std::collections::HashMap;
10use std::rc::Rc;
11use std::sync::atomic::{AtomicBool, Ordering};
12
/// One-shot flag: ensures the "User key exceeds 62 bits" warning is logged at most once.
static USER_OVERFLOW_LOGGED: AtomicBool = AtomicBool::new(false);
/// One-shot flag: ensures the "Index key exceeds 62 bits" warning is logged at most once.
static INDEX_OVERFLOW_LOGGED: AtomicBool = AtomicBool::new(false);
15
/// Key type for lazy list items.
///
/// Separates user-provided keys from default index-based keys to prevent collisions.
/// This matches JC's `getDefaultLazyLayoutKey()` pattern where a wrapper type
/// (`DefaultLazyKey`) ensures default keys never collide with user-provided keys.
///
/// Use [`LazyLayoutKey::to_slot_id`] to obtain a tagged `u64` encoding in which the
/// two variants occupy disjoint ranges.
///
/// # JC Reference
/// - `LazyLayoutIntervalContent.getKey()` returns `content.key?.invoke(localIndex) ?: getDefaultLazyLayoutKey(index)`
/// - `Lazy.android.kt` defines `DefaultLazyKey(index)` as a wrapper data class
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum LazyLayoutKey {
    /// User-provided key (from `scope.item(key: Some(k), ...)` or `scope.items(key: Some(|i| ...), ...)`)
    User(u64),
    /// Default key based on global index. Cannot collide with User keys due to enum separation.
    Index(usize),
}
32
33impl LazyLayoutKey {
34 /// Tag for user-provided keys: high 2 bits = 00
35 const USER_TAG: u64 = 0b00 << 62;
36 /// Tag for index-based keys: high 2 bits = 01
37 const INDEX_TAG: u64 = 0b01 << 62;
38 /// Mask for the value portion (bits 0-61).
39 /// Based on u64 output type, so this is platform-independent.
40 const VALUE_MASK: u64 = (1u64 << 62) - 1;
41
42 /// Converts to u64 for slot ID usage with guaranteed non-overlapping ranges.
43 ///
44 /// # Encoding
45 /// Uses high 2 bits of the 64-bit slot ID as a type tag:
46 /// - User keys: `0b00` tag + 62-bit value (range: 0x0000... - 0x3FFF...)
47 /// - Index keys: `0b01` tag + 62-bit value (range: 0x4000... - 0x7FFF...)
48 ///
49 /// # ⚠️ Large Key Handling
50 /// Values larger than 62 bits are **mixed down to 62 bits**. This avoids panics
51 /// for extreme indices (e.g. `usize::MAX`) but introduces a small chance of
52 /// collisions for out-of-range keys. Prefer keys that fit in 62 bits when
53 /// you need guaranteed collision-free IDs.
54 ///
55 /// # Cross-Platform Safety
56 /// The slot ID is always `u64` regardless of target platform.
57 #[inline]
58 pub fn to_slot_id(self) -> u64 {
59 match self {
60 // NOTE: Values beyond 62 bits are mixed to preserve stability.
61 LazyLayoutKey::User(k) => {
62 let value = Self::normalize_value(k, "User", &USER_OVERFLOW_LOGGED);
63 Self::USER_TAG | value
64 }
65 LazyLayoutKey::Index(i) => {
66 let value = Self::normalize_value(i as u64, "Index", &INDEX_OVERFLOW_LOGGED);
67 Self::INDEX_TAG | value
68 }
69 }
70 }
71
72 #[inline]
73 fn normalize_value(value: u64, kind: &'static str, logged: &AtomicBool) -> u64 {
74 if value <= Self::VALUE_MASK {
75 value
76 } else {
77 if !logged.swap(true, Ordering::Relaxed) {
78 log::warn!(
79 "LazyList {} key {:#018x} exceeds 62 bits; mixing to 62 bits to avoid overflow",
80 kind,
81 value
82 );
83 }
84 Self::mix_to_value_bits(value)
85 }
86 }
87
88 #[inline]
89 fn mix_to_value_bits(mut value: u64) -> u64 {
90 value ^= value >> 33;
91 value = value.wrapping_mul(0xff51afd7ed558ccd);
92 value ^= value >> 33;
93 value = value.wrapping_mul(0xc4ceb9fe1a85ec53);
94 value ^= value >> 33;
95 value & Self::VALUE_MASK
96 }
97
98 /// Returns true if this is a user-provided key.
99 #[inline]
100 pub fn is_user_key(self) -> bool {
101 matches!(self, LazyLayoutKey::User(_))
102 }
103
104 /// Returns true if this is a default index-based key.
105 #[inline]
106 pub fn is_index_key(self) -> bool {
107 matches!(self, LazyLayoutKey::Index(_))
108 }
109}
110
/// Marker type for lazy scope DSL.
///
/// NOTE(review): zero-sized; presumably used as a receiver/marker by the DSL
/// machinery — confirm against the call sites that consume it.
#[doc(hidden)]
pub struct LazyScopeMarker;
114
/// Receiver scope for lazy list content definition.
///
/// Used by `LazyColumn` and `LazyRow` to define list items.
/// Matches Jetpack Compose's `LazyListScope`.
///
/// # Example
///
/// ```rust,ignore
/// lazy_column(modifier, state, |scope| {
///     // Single item
///     scope.item(Some(0), None, || {
///         Text::new("Header")
///     });
///
///     // Multiple items
///     scope.items(data.len(), Some(|i| data[i].id), None, |i| {
///         Text::new(data[i].name.clone())
///     });
/// });
/// ```
pub trait LazyListScope {
    /// Adds a single item to the list.
    ///
    /// # Arguments
    /// * `key` - Optional stable key for the item; when `None`, an index-based
    ///   default key is derived (see [`LazyLayoutKey`])
    /// * `content_type` - Optional content type for efficient reuse
    /// * `content` - Closure that emits the item content
    fn item<F>(&mut self, key: Option<u64>, content_type: Option<u64>, content: F)
    where
        F: Fn() + 'static;

    /// Adds multiple items to the list.
    ///
    /// # Arguments
    /// * `count` - Number of items to add (`0` is allowed and adds nothing)
    /// * `key` - Optional function to generate stable keys from the local index
    /// * `content_type` - Optional function to generate content types from the local index
    /// * `item_content` - Closure that emits content for each item, given its local index
    fn items<K, C, F>(
        &mut self,
        count: usize,
        key: Option<K>,
        content_type: Option<C>,
        item_content: F,
    ) where
        K: Fn(usize) -> u64 + 'static,
        C: Fn(usize) -> u64 + 'static,
        F: Fn(usize) + 'static;
}
164
/// Internal representation of a lazy list item interval.
///
/// Based on JC's `LazyLayoutIntervalContent.Interval`.
/// Uses `Rc` for shared ownership of the closures (the struct itself is not `Clone`).
pub struct LazyListInterval {
    /// Start index of this interval in the total item list.
    pub start_index: usize,

    /// Number of items in this interval.
    pub count: usize,

    /// Key generator for items in this interval; receives the *local* index.
    /// Based on JC's `Interval.key: ((index: Int) -> Any)?`
    pub key: Option<Rc<dyn Fn(usize) -> u64>>,

    /// Content type generator for items in this interval; receives the *local* index.
    /// Based on JC's `Interval.type: ((index: Int) -> Any?)`
    pub content_type: Option<Rc<dyn Fn(usize) -> u64>>,

    /// Content generator for items in this interval.
    /// Takes the local index within the interval.
    pub content: Rc<dyn Fn(usize)>,
}
188
189impl std::fmt::Debug for LazyListInterval {
190 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
191 f.debug_struct("LazyListInterval")
192 .field("start_index", &self.start_index)
193 .field("count", &self.count)
194 .finish_non_exhaustive()
195 }
196}
197
/// Builder that collects intervals during scope execution.
///
/// Based on JC's `LazyLayoutIntervalContent` with `IntervalList`.
pub struct LazyListIntervalContent {
    // Intervals in insertion order; start_index values are increasing.
    intervals: Vec<LazyListInterval>,
    // Sum of `count` over all intervals.
    total_count: usize,
    /// Cached slot_id→index mapping for O(1) key lookups.
    /// Built lazily on first key lookup, invalidated when content changes.
    key_cache: RefCell<Option<HashMap<u64, usize>>>,
}
208
impl LazyListIntervalContent {
    /// Creates a new empty interval content.
    pub fn new() -> Self {
        Self {
            intervals: Vec::new(),
            total_count: 0,
            key_cache: RefCell::new(None),
        }
    }

    /// Invalidates the key cache. Called when content is modified.
    fn invalidate_cache(&self) {
        *self.key_cache.borrow_mut() = None;
    }

    /// Builds the slot_id→index cache for O(1) lookups (no-op if already built).
    ///
    /// Memory usage is approximately 16 bytes per item (slot_id: u64 + index: usize).
    /// For a list of 100,000 items, this is ~1.6MB of cache memory.
    ///
    /// NOTE(review): if two items produce the same slot ID (possible when user
    /// keys collide after 62-bit mixing), the later index overwrites the earlier.
    fn ensure_cache(&self) {
        let mut cache = self.key_cache.borrow_mut();
        if cache.is_some() {
            return; // Already built
        }

        let mut map = HashMap::with_capacity(self.total_count);
        for index in 0..self.total_count {
            // get_key() never touches key_cache, so holding borrow_mut here is safe.
            let slot_id = self.get_key(index).to_slot_id();
            map.insert(slot_id, index);
        }
        *cache = Some(map);
    }

    /// Returns the total number of items across all intervals.
    /// Matches JC's `LazyLayoutIntervalContent.itemCount`.
    pub fn item_count(&self) -> usize {
        self.total_count
    }

    /// Returns the intervals.
    pub fn intervals(&self) -> &[LazyListInterval] {
        &self.intervals
    }

    /// Gets the key for an item at the given global index.
    ///
    /// Returns a [`LazyLayoutKey`] that distinguishes between user-provided keys
    /// and default index-based keys to prevent collisions.
    ///
    /// Out-of-range indices fall through to the default `Index` key rather than
    /// erroring, mirroring JC's default-key behavior.
    ///
    /// Matches JC's `LazyLayoutIntervalContent.getKey(index)` pattern.
    pub fn get_key(&self, index: usize) -> LazyLayoutKey {
        if let Some((interval, local_index)) = self.find_interval(index) {
            if let Some(key_fn) = &interval.key {
                return LazyLayoutKey::User(key_fn(local_index));
            }
        }
        // Default key wraps the index (matches JC's getDefaultLazyLayoutKey)
        LazyLayoutKey::Index(index)
    }

    /// Gets the content type for an item at the given global index.
    /// Returns `None` for out-of-range indices or intervals without a type fn.
    /// Matches JC's `LazyLayoutIntervalContent.getContentType(index)`.
    pub fn get_content_type(&self, index: usize) -> Option<u64> {
        if let Some((interval, local_index)) = self.find_interval(index) {
            if let Some(type_fn) = &interval.content_type {
                return Some(type_fn(local_index));
            }
        }
        None
    }

    /// Invokes the content closure for an item at the given global index.
    /// Out-of-range indices are silently ignored.
    ///
    /// Matches JC's `withInterval` pattern where block is called with
    /// local index and interval content.
    pub fn invoke_content(&self, index: usize) {
        if let Some((interval, local_index)) = self.find_interval(index) {
            (interval.content)(local_index);
        }
    }

    /// Executes a block with the interval containing the given global index.
    /// Returns `None` when the index is out of range.
    /// Matches JC's `withInterval(globalIndex, block)`.
    pub fn with_interval<T, F>(&self, global_index: usize, block: F) -> Option<T>
    where
        F: FnOnce(usize, &LazyListInterval) -> T,
    {
        self.find_interval(global_index)
            .map(|(interval, local_index)| block(local_index, interval))
    }

    /// Returns the index of an item with the given key, or None if not found.
    /// Matches JC's `LazyLayoutItemProvider.getIndex(key: Any): Int`.
    ///
    /// This is used for scroll position stability - when items are added/removed,
    /// the scroll position can be maintained by finding the new index of the
    /// item that was previously at the scroll position (identified by key).
    ///
    /// Lists larger than [`Self::CACHE_THRESHOLD`] items use a cached HashMap for
    /// O(1) lookup; smaller lists use a linear scan (see [`Self::get_index_by_slot_id`]).
    /// For hot paths on large lists, prefer [`Self::get_index_by_key_in_range`]
    /// with a `NearestRangeState` to bound the search.
    #[must_use]
    pub fn get_index_by_key(&self, key: LazyLayoutKey) -> Option<usize> {
        // Convert key to slot_id and use the cache
        let slot_id = key.to_slot_id();
        self.get_index_by_slot_id(slot_id)
    }

    /// Returns the index of an item with the given key, searching only within the range.
    /// Used with NearestRangeState for O(1) key lookup in large lists.
    /// The range is clamped to the total item count before scanning.
    pub fn get_index_by_key_in_range(
        &self,
        key: LazyLayoutKey,
        range: std::ops::Range<usize>,
    ) -> Option<usize> {
        let start = range.start.min(self.total_count);
        let end = range.end.min(self.total_count);
        (start..end).find(|&index| self.get_key(index) == key)
    }

    /// Threshold at or below which linear search is used instead of the HashMap
    /// cache (building and hashing cost more than a short scan).
    const CACHE_THRESHOLD: usize = 64;

    /// Returns the index of an item with the given slot ID, or None if not found.
    ///
    /// This is used for scroll position stability when the stored key is a slot ID (u64).
    /// Slot IDs are generated by `LazyLayoutKey::to_slot_id()`.
    ///
    /// Uses cached HashMap for O(1) lookup on large lists. For small lists
    /// (at most [`Self::CACHE_THRESHOLD`] items), uses linear search to avoid
    /// HashMap allocation overhead.
    /// For hot paths during scrolling, prefer [`Self::get_index_by_slot_id_in_range`] first.
    #[must_use]
    pub fn get_index_by_slot_id(&self, slot_id: u64) -> Option<usize> {
        // For small lists, linear search is faster than building/using the cache
        if self.total_count <= Self::CACHE_THRESHOLD {
            return (0..self.total_count)
                .find(|&index| self.get_key(index).to_slot_id() == slot_id);
        }

        // Try to use cache first (O(1) lookup)
        self.ensure_cache();
        if let Some(cache) = self.key_cache.borrow().as_ref() {
            return cache.get(&slot_id).copied();
        }

        // Safety fallback: should not happen since ensure_cache always builds the map.
        log::warn!(
            "get_index_by_slot_id: cache unexpectedly missing ({} items), using linear search",
            self.total_count
        );
        (0..self.total_count).find(|&index| self.get_key(index).to_slot_id() == slot_id)
    }

    /// Returns the index of an item with the given slot ID, searching only within the range.
    /// The range is clamped to the total item count; always a linear scan (no cache).
    pub fn get_index_by_slot_id_in_range(
        &self,
        slot_id: u64,
        range: std::ops::Range<usize>,
    ) -> Option<usize> {
        let start = range.start.min(self.total_count);
        let end = range.end.min(self.total_count);
        (start..end).find(|&index| self.get_key(index).to_slot_id() == slot_id)
    }

    /// Finds the interval containing the given global index.
    /// Returns the interval and the local index within it.
    /// Uses binary search over interval end offsets for O(log n) lookup.
    fn find_interval(&self, index: usize) -> Option<(&LazyListInterval, usize)> {
        if self.intervals.is_empty() || index >= self.total_count {
            return None;
        }

        // partition_point returns the first interval whose end offset exceeds `index`;
        // intervals are stored in ascending start_index order, so this is the candidate.
        let pos = self
            .intervals
            .partition_point(|interval| interval.start_index + interval.count <= index);

        if pos < self.intervals.len() {
            let interval = &self.intervals[pos];
            if index >= interval.start_index && index < interval.start_index + interval.count {
                let local_index = index - interval.start_index;
                return Some((interval, local_index));
            }
        }
        None
    }
}
396
397impl Default for LazyListIntervalContent {
398 fn default() -> Self {
399 Self::new()
400 }
401}
402
403impl LazyListScope for LazyListIntervalContent {
404 fn item<F>(&mut self, key: Option<u64>, content_type: Option<u64>, content: F)
405 where
406 F: Fn() + 'static,
407 {
408 self.invalidate_cache(); // Content is changing
409 let start_index = self.total_count;
410 self.intervals.push(LazyListInterval {
411 start_index,
412 count: 1,
413 key: key.map(|k| Rc::new(move |_| k) as Rc<dyn Fn(usize) -> u64>),
414 content_type: content_type.map(|t| Rc::new(move |_| t) as Rc<dyn Fn(usize) -> u64>),
415 content: Rc::new(move |_| content()),
416 });
417 self.total_count += 1;
418 }
419
420 fn items<K, C, F>(
421 &mut self,
422 count: usize,
423 key: Option<K>,
424 content_type: Option<C>,
425 item_content: F,
426 ) where
427 K: Fn(usize) -> u64 + 'static,
428 C: Fn(usize) -> u64 + 'static,
429 F: Fn(usize) + 'static,
430 {
431 if count == 0 {
432 return;
433 }
434
435 self.invalidate_cache(); // Content is changing
436 let start_index = self.total_count;
437 self.intervals.push(LazyListInterval {
438 start_index,
439 count,
440 key: key.map(|k| Rc::new(k) as Rc<dyn Fn(usize) -> u64>),
441 content_type: content_type.map(|c| Rc::new(c) as Rc<dyn Fn(usize) -> u64>),
442 content: Rc::new(item_content),
443 });
444 self.total_count += count;
445 }
446}
447
448use crate::lazy::item_provider::LazyLayoutItemProvider;
449
/// Implements [`LazyLayoutItemProvider`] to formalize the item factory contract.
/// This provides the same functionality as the existing inherent methods but
/// through the standardized trait interface.
impl LazyLayoutItemProvider for LazyListIntervalContent {
    /// Total item count across all intervals.
    fn item_count(&self) -> usize {
        self.total_count
    }

    /// Tagged slot ID for the item at `index`.
    fn get_key(&self, index: usize) -> u64 {
        // Delegate to the inherent get_key (disambiguated from this trait method)
        // and convert to slot_id
        LazyListIntervalContent::get_key(self, index).to_slot_id()
    }

    fn get_content_type(&self, index: usize) -> Option<u64> {
        // Delegate to the inherent method which returns Option<u64>
        LazyListIntervalContent::get_content_type(self, index)
    }

    fn get_index(&self, key: u64) -> Option<usize> {
        // Use the cached lookup (linear scan for small lists)
        self.get_index_by_slot_id(key)
    }
}
473
474/// Extension trait for adding convenience methods to [`LazyListScope`].
475///
476/// Provides ergonomic APIs for common use cases with different performance tradeoffs:
477///
478/// | Method | Upfront Cost | Use Case |
479/// |--------|--------------|----------|
480/// | [`items_slice`] | O(n) copy | Convenience, small data |
481/// | [`items_slice_rc`] | O(1) | Data already in `Rc<[T]>` |
482/// | [`items_with_provider`] | O(1) | Lazy on-demand access |
483pub trait LazyListScopeExt: LazyListScope {
484 /// Adds items from a slice with an item-aware content closure.
485 ///
486 /// # ⚠️ Performance Warning
487 ///
488 /// **This method performs an O(n) allocation and copy of the entire slice upfront.**
489 ///
490 /// This copy is required to satisfy Rust's `'static` closure requirements for
491 /// the lazy list item factory. For small lists (< 1000 items) this is typically
492 /// acceptable, but for large datasets consider these alternatives:
493 ///
494 /// | Alternative | When to Use |
495 /// |-------------|-------------|
496 /// | [`items_slice_rc`] | Data is already in `Rc<[T]>` - **zero copy** |
497 /// | [`items_vec`] | Data is in a `Vec<T>` you can give up ownership of - **efficient** |
498 /// | [`items_with_provider`] | Need lazy on-demand access - **zero copy** |
499 ///
500 /// After the initial copy, the closure captures a reference-counted pointer,
501 /// so subsequent Rc clones are O(1).
502 ///
503 /// # Example
504 ///
505 /// ```rust,ignore
506 /// let data = vec!["Apple", "Banana", "Cherry"];
507 /// scope.items_slice(&data, |item| {
508 /// Text(item.to_string(), Modifier::empty());
509 /// });
510 /// ```
511 fn items_slice<T, F>(&mut self, items: &[T], item_content: F)
512 where
513 T: Clone + 'static,
514 F: Fn(&T) + 'static,
515 {
516 // Note: to_vec() is O(n) allocation + copy. This is documented above.
517 // For zero-copy, use items_slice_rc() or items_with_provider().
518 let items_rc: Rc<[T]> = items.to_vec().into();
519 self.items(
520 items.len(),
521 None::<fn(usize) -> u64>,
522 None::<fn(usize) -> u64>,
523 move |index| {
524 if let Some(item) = items_rc.get(index) {
525 item_content(item);
526 }
527 },
528 );
529 }
530
531 /// Adds items from a `Vec<T>`, taking ownership.
532 ///
533 /// **Efficient ownership transfer**: Uses `Rc::from(vec)` which avoids copying
534 /// elements if the allocation fits (or does a simple realloc).
535 /// Use this when you have a `Vec` and want to pass it to the list.
536 ///
537 /// # Example
538 ///
539 /// ```rust,ignore
540 /// let data = vec!["Apple".to_string(), "Banana".to_string()];
541 /// scope.items_vec(data, |item| {
542 /// Text(item.to_string(), Modifier::empty());
543 /// });
544 /// ```
545 fn items_vec<T, F>(&mut self, items: Vec<T>, item_content: F)
546 where
547 T: 'static,
548 F: Fn(&T) + 'static,
549 {
550 let len = items.len();
551 let items_rc: Rc<[T]> = Rc::from(items);
552 self.items(
553 len,
554 None::<fn(usize) -> u64>,
555 None::<fn(usize) -> u64>,
556 move |index| {
557 if let Some(item) = items_rc.get(index) {
558 item_content(item);
559 }
560 },
561 );
562 }
563
564 /// Adds indexed items from a collection (Slice, Vec, or Rc).
565 ///
566 /// This method is generic over the input type `L` which must be convertible to `Rc<[T]>`.
567 /// This allows for efficient ownership transfer (zero-copy for `Vec` and `Rc`) or
568 /// convenient usage with slices (which will perform a copy).
569 ///
570 /// # Performance Note
571 ///
572 /// - **`Vec<T>`**: Zero-copy (ownership transfer). Efficient.
573 /// - **`Rc<[T]>`**: Zero-copy (ownership transfer). Efficient.
574 /// - **`&[T]`**: **O(N) copy**. Convenient for small lists, but avoid for large datasets.
575 ///
576 /// # Example
577 ///
578 /// ```rust,ignore
579 /// // Efficient Vec usage (zero-copy)
580 /// let data = vec!["Apple".to_string(), "Banana".to_string()];
581 /// scope.items_indexed(data, |index, item| { ... });
582 ///
583 /// // Slice usage (performs copy)
584 /// let data_slice = &["Apple", "Banana"];
585 /// scope.items_indexed(data_slice, |index, item| { ... });
586 /// ```
587 fn items_indexed<T, L, F>(&mut self, items: L, item_content: F)
588 where
589 T: 'static,
590 L: Into<Rc<[T]>>,
591 F: Fn(usize, &T) + 'static,
592 {
593 let items_rc: Rc<[T]> = items.into();
594 self.items(
595 items_rc.len(),
596 None::<fn(usize) -> u64>,
597 None::<fn(usize) -> u64>,
598 move |index| {
599 if let Some(item) = items_rc.get(index) {
600 item_content(index, item);
601 }
602 },
603 );
604 }
605
606 /// Adds items from a pre-existing `Rc<[T]>` without cloning.
607 ///
608 /// **Zero-copy optimization**: If you already have your data in an `Rc<[T]>`,
609 /// use this method to avoid the O(n) clone that `items_slice` performs.
610 ///
611 /// # Example
612 ///
613 /// ```rust,ignore
614 /// let data: Rc<[String]> = Rc::from(vec!["Apple".into(), "Banana".into()]);
615 /// scope.items_slice_rc(Rc::clone(&data), |item| {
616 /// Text(item.to_string(), Modifier::empty());
617 /// });
618 /// ```
619 fn items_slice_rc<T, F>(&mut self, items: Rc<[T]>, item_content: F)
620 where
621 T: 'static,
622 F: Fn(&T) + 'static,
623 {
624 let len = items.len();
625 self.items(
626 len,
627 None::<fn(usize) -> u64>,
628 None::<fn(usize) -> u64>,
629 move |index| {
630 if let Some(item) = items.get(index) {
631 item_content(item);
632 }
633 },
634 );
635 }
636
637 /// Adds indexed items from a pre-existing `Rc<[T]>` without cloning.
638 ///
639 /// **Zero-copy optimization**: If you already have your data in an `Rc<[T]>`,
640 /// use this method to avoid the O(n) clone that `items_indexed` performs.
641 ///
642 /// # Example
643 ///
644 /// ```rust,ignore
645 /// let data: Rc<[String]> = Rc::from(vec!["Apple".into(), "Banana".into()]);
646 /// scope.items_indexed_rc(Rc::clone(&data), |index, item| {
647 /// Text(format!("{}. {}", index + 1, item), Modifier::empty());
648 /// });
649 /// ```
650 fn items_indexed_rc<T, F>(&mut self, items: Rc<[T]>, item_content: F)
651 where
652 T: 'static,
653 F: Fn(usize, &T) + 'static,
654 {
655 let len = items.len();
656 self.items(
657 len,
658 None::<fn(usize) -> u64>,
659 None::<fn(usize) -> u64>,
660 move |index| {
661 if let Some(item) = items.get(index) {
662 item_content(index, item);
663 }
664 },
665 );
666 }
667
668 /// Adds items using a provider function for on-demand data access.
669 ///
670 /// **Zero-allocation pattern**: Instead of storing data, the provider function
671 /// is called lazily when each item is rendered. This avoids any upfront
672 /// allocation or cloning.
673 ///
674 /// The provider should return `Some(T)` for valid indices and `None` for
675 /// out-of-bounds access. The item is passed by value to the content closure.
676 ///
677 /// # Example
678 ///
679 /// ```rust,ignore
680 /// let data = vec!["Apple", "Banana", "Cherry"];
681 /// scope.items_with_provider(
682 /// data.len(),
683 /// move |index| data.get(index).copied(),
684 /// |item| {
685 /// Text(item.to_string(), Modifier::empty());
686 /// },
687 /// );
688 /// ```
689 fn items_with_provider<T, P, F>(&mut self, count: usize, provider: P, item_content: F)
690 where
691 T: 'static,
692 P: Fn(usize) -> Option<T> + 'static,
693 F: Fn(T) + 'static,
694 {
695 self.items(
696 count,
697 None::<fn(usize) -> u64>,
698 None::<fn(usize) -> u64>,
699 move |index| {
700 if let Some(item) = provider(index) {
701 item_content(item);
702 }
703 },
704 );
705 }
706
707 /// Adds indexed items using a provider function for on-demand data access.
708 ///
709 /// **Zero-allocation pattern**: Instead of storing data, the provider function
710 /// is called lazily when each item is rendered. This avoids any upfront
711 /// allocation or cloning.
712 ///
713 /// # Example
714 ///
715 /// ```rust,ignore
716 /// let data = vec!["Apple", "Banana", "Cherry"];
717 /// scope.items_indexed_with_provider(
718 /// data.len(),
719 /// move |index| data.get(index).copied(),
720 /// |index, item| {
721 /// Text(format!("{}. {}", index + 1, item), Modifier::empty());
722 /// },
723 /// );
724 /// ```
725 fn items_indexed_with_provider<T, P, F>(&mut self, count: usize, provider: P, item_content: F)
726 where
727 T: 'static,
728 P: Fn(usize) -> Option<T> + 'static,
729 F: Fn(usize, T) + 'static,
730 {
731 self.items(
732 count,
733 None::<fn(usize) -> u64>,
734 None::<fn(usize) -> u64>,
735 move |index| {
736 if let Some(item) = provider(index) {
737 item_content(index, item);
738 }
739 },
740 );
741 }
742}
743
/// Blanket impl: every [`LazyListScope`] implementor automatically gains the
/// [`LazyListScopeExt`] convenience methods.
impl<T: LazyListScope + ?Sized> LazyListScopeExt for T {}
745
746#[cfg(test)]
747mod tests {
748 use super::*;
749 use std::cell::Cell;
750
751 #[test]
752 fn test_single_item() {
753 let mut content = LazyListIntervalContent::new();
754 let called = Rc::new(Cell::new(false));
755 let called_clone = Rc::clone(&called);
756
757 content.item(Some(42), None, move || {
758 called_clone.set(true);
759 });
760
761 assert_eq!(content.item_count(), 1);
762 assert_eq!(content.get_key(0), LazyLayoutKey::User(42));
763
764 content.invoke_content(0);
765 assert!(called.get());
766 }
767
768 #[test]
769 fn test_multiple_items() {
770 let mut content = LazyListIntervalContent::new();
771
772 content.items(
773 5,
774 Some(|i| (i * 10) as u64),
775 None::<fn(usize) -> u64>,
776 |_i| {},
777 );
778
779 assert_eq!(content.item_count(), 5);
780 assert_eq!(content.get_key(0), LazyLayoutKey::User(0));
781 assert_eq!(content.get_key(1), LazyLayoutKey::User(10));
782 assert_eq!(content.get_key(4), LazyLayoutKey::User(40));
783 }
784
785 #[test]
786 fn test_mixed_intervals() {
787 let mut content = LazyListIntervalContent::new();
788
789 // Header
790 content.item(Some(100), None, || {});
791
792 // Items
793 content.items(3, Some(|i| i as u64), None::<fn(usize) -> u64>, |_| {});
794
795 // Footer
796 content.item(Some(200), None, || {});
797
798 assert_eq!(content.item_count(), 5);
799 assert_eq!(content.get_key(0), LazyLayoutKey::User(100)); // Header
800 assert_eq!(content.get_key(1), LazyLayoutKey::User(0)); // First item
801 assert_eq!(content.get_key(2), LazyLayoutKey::User(1)); // Second item
802 assert_eq!(content.get_key(3), LazyLayoutKey::User(2)); // Third item
803 assert_eq!(content.get_key(4), LazyLayoutKey::User(200)); // Footer
804 }
805
806 #[test]
807 fn test_with_interval() {
808 let mut content = LazyListIntervalContent::new();
809 content.items(
810 5,
811 None::<fn(usize) -> u64>,
812 None::<fn(usize) -> u64>,
813 |_| {},
814 );
815
816 let result = content.with_interval(3, |local_idx, interval| (local_idx, interval.count));
817
818 assert_eq!(result, Some((3, 5)));
819 }
820
821 #[test]
822 fn test_user_keys_dont_collide_with_default_keys() {
823 let mut content = LazyListIntervalContent::new();
824
825 // Item 0: User key = 0
826 content.item(Some(0), None, || {});
827 // Item 1: No key (default Index(1))
828 content.item(None, None, || {});
829 // Item 2: User key = 1
830 content.item(Some(1), None, || {});
831
832 // User key 0 should NOT equal default Index(0)
833 assert_eq!(content.get_key(0), LazyLayoutKey::User(0));
834 assert_eq!(content.get_key(1), LazyLayoutKey::Index(1));
835 assert_eq!(content.get_key(2), LazyLayoutKey::User(1));
836
837 // Critically: User(0) != Index(1) and User(1) != Index(1)
838 assert_ne!(content.get_key(0), content.get_key(1));
839 assert_ne!(content.get_key(2), content.get_key(1));
840
841 // Keys should convert to different slot IDs
842 assert_ne!(
843 content.get_key(0).to_slot_id(),
844 content.get_key(1).to_slot_id()
845 );
846 }
847
848 #[test]
849 fn test_slot_id_collision_prevention() {
850 // User(0) and Index(0) should produce different slot IDs
851 let user_key = LazyLayoutKey::User(0);
852 let index_key = LazyLayoutKey::Index(0);
853
854 assert_ne!(user_key.to_slot_id(), index_key.to_slot_id());
855
856 // User keys have tag 0b00 in high 2 bits (bits 62-63)
857 // Index keys have tag 0b01 in high 2 bits (bit 62 set)
858 assert_eq!(user_key.to_slot_id(), 0); // 0b00 << 62 | 0 = 0
859 assert_eq!(index_key.to_slot_id(), 1u64 << 62); // 0b01 << 62 | 0
860
861 // User keys occupy range 0x0000... to 0x3FFF...
862 // Index keys occupy range 0x4000... to 0x7FFF...
863 assert!(user_key.to_slot_id() < (1u64 << 62));
864 assert!(index_key.to_slot_id() >= (1u64 << 62));
865 assert!(index_key.to_slot_id() < (2u64 << 62));
866
867 // Any user value within 62 bits maps to the user range
868 let user_max = LazyLayoutKey::User((1u64 << 62) - 1);
869 assert!(
870 user_max.to_slot_id() < (1u64 << 62),
871 "User keys stay in user range"
872 );
873 assert_eq!(user_max.to_slot_id(), (1u64 << 62) - 1); // All 62 value bits set
874
875 // Any index value within 62 bits maps to the index range
876 let index_large = LazyLayoutKey::Index(((1u64 << 62) - 1) as usize);
877 assert!(
878 index_large.to_slot_id() >= (1u64 << 62),
879 "Index keys stay in index range"
880 );
881 assert!(
882 index_large.to_slot_id() < (2u64 << 62),
883 "Index keys below reserved range"
884 );
885
886 // See release-only test for the documented high-bit collision behavior.
887 }
888
889 #[test]
890 fn test_user_key_overflow_is_stable_and_tagged() {
891 let user_max = LazyLayoutKey::User(u64::MAX);
892 let slot = user_max.to_slot_id();
893 assert_eq!(slot, user_max.to_slot_id());
894 assert!(slot < (1u64 << 62));
895 }
896
897 #[test]
898 fn test_index_key_overflow_is_stable_and_tagged() {
899 let index_max = LazyLayoutKey::Index(usize::MAX);
900 let slot = index_max.to_slot_id();
901 assert_eq!(slot, index_max.to_slot_id());
902 assert!(slot >= (1u64 << 62));
903 assert!(slot < (2u64 << 62));
904 }
905
906 #[test]
907 fn test_user_key_high_bits_influence_slot_id() {
908 let key_low = LazyLayoutKey::User(0x0000_0000_0000_0001);
909 let key_high = LazyLayoutKey::User(0x4000_0000_0000_0001); // Differs in bit 62
910 assert_ne!(
911 key_low.to_slot_id(),
912 key_high.to_slot_id(),
913 "High bits are mixed into the slot id to avoid truncation collisions"
914 );
915 }
916
917 // ============================================================
918 // LazyListScopeExt tests
919 // ============================================================
920
921 #[test]
922 fn test_items_slice() {
923 let mut content = LazyListIntervalContent::new();
924 let data = vec!["Apple", "Banana", "Cherry"];
925 let items_visited = Rc::new(RefCell::new(Vec::new()));
926 let items_clone = items_visited.clone();
927
928 content.items_slice(&data, move |item: &&str| {
929 items_clone.borrow_mut().push((*item).to_string());
930 });
931
932 assert_eq!(content.item_count(), 3);
933
934 // Invoke each item and check the callback received correct values
935 for i in 0..3 {
936 content.invoke_content(i);
937 }
938
939 let visited = items_visited.borrow();
940 assert_eq!(*visited, vec!["Apple", "Banana", "Cherry"]);
941 }
942
943 #[test]
944 fn test_items_indexed() {
945 let mut content = LazyListIntervalContent::new();
946 // Use Vec -> Into<Rc<[T]>> directly (efficient)
947 let data = vec![
948 "Apple".to_string(),
949 "Banana".to_string(),
950 "Cherry".to_string(),
951 ];
952 let items_visited = Rc::new(RefCell::new(Vec::new()));
953 let items_clone = items_visited.clone();
954
955 content.items_indexed(data, move |index, item: &String| {
956 items_clone.borrow_mut().push((index, item.clone()));
957 });
958
959 assert_eq!(content.item_count(), 3);
960
961 for i in 0..3 {
962 content.invoke_content(i);
963 }
964
965 let visited = items_visited.borrow();
966 assert_eq!(
967 *visited,
968 vec![
969 (0, "Apple".to_string()),
970 (1, "Banana".to_string()),
971 (2, "Cherry".to_string())
972 ]
973 );
974 }
975
976 #[test]
977 fn test_items_indexed_slice() {
978 let mut content = LazyListIntervalContent::new();
979 // Use Slice -> Into<Rc<[T]>> (performs copy)
980 let data = vec!["Apple", "Banana", "Cherry"];
981 let items_visited = Rc::new(RefCell::new(Vec::new()));
982 let items_clone = items_visited.clone();
983
984 // Note: passing slice explicitly (generic bound doesn't do deref coercion from &Vec)
985 content.items_indexed(data.as_slice(), move |index, item: &&str| {
986 items_clone.borrow_mut().push((index, (*item).to_string()));
987 });
988
989 assert_eq!(content.item_count(), 3);
990
991 for i in 0..3 {
992 content.invoke_content(i);
993 }
994
995 let visited = items_visited.borrow();
996 assert_eq!(
997 *visited,
998 vec![
999 (0, "Apple".to_string()),
1000 (1, "Banana".to_string()),
1001 (2, "Cherry".to_string())
1002 ]
1003 );
1004 }
1005
1006 #[test]
1007 fn test_items_slice_rc() {
1008 let mut content = LazyListIntervalContent::new();
1009 let data: Rc<[String]> = Rc::from(vec!["Apple".into(), "Banana".into()]);
1010 let items_visited = Rc::new(RefCell::new(Vec::new()));
1011 let items_clone = items_visited.clone();
1012
1013 content.items_slice_rc(Rc::clone(&data), move |item: &String| {
1014 items_clone.borrow_mut().push(item.clone());
1015 });
1016
1017 assert_eq!(content.item_count(), 2);
1018
1019 for i in 0..2 {
1020 content.invoke_content(i);
1021 }
1022
1023 let visited = items_visited.borrow();
1024 assert_eq!(*visited, vec!["Apple", "Banana"]);
1025 }
1026
1027 #[test]
1028 fn test_items_indexed_rc() {
1029 let mut content = LazyListIntervalContent::new();
1030 let data: Rc<[String]> = Rc::from(vec!["Apple".into(), "Banana".into()]);
1031 let items_visited = Rc::new(RefCell::new(Vec::new()));
1032 let items_clone = items_visited.clone();
1033
1034 content.items_indexed_rc(Rc::clone(&data), move |index, item: &String| {
1035 items_clone.borrow_mut().push((index, item.clone()));
1036 });
1037
1038 assert_eq!(content.item_count(), 2);
1039
1040 for i in 0..2 {
1041 content.invoke_content(i);
1042 }
1043
1044 let visited = items_visited.borrow();
1045 assert_eq!(
1046 *visited,
1047 vec![(0, "Apple".to_string()), (1, "Banana".to_string())]
1048 );
1049 }
1050
1051 #[test]
1052 fn test_items_with_provider() {
1053 let mut content = LazyListIntervalContent::new();
1054 let data = ["Apple", "Banana", "Cherry"];
1055 let items_visited = Rc::new(RefCell::new(Vec::new()));
1056 let items_clone = items_visited.clone();
1057
1058 content.items_with_provider(
1059 data.len(),
1060 move |index| data.get(index).copied(),
1061 move |item: &str| {
1062 items_clone.borrow_mut().push(item.to_string());
1063 },
1064 );
1065
1066 assert_eq!(content.item_count(), 3);
1067
1068 for i in 0..3 {
1069 content.invoke_content(i);
1070 }
1071
1072 let visited = items_visited.borrow();
1073 assert_eq!(*visited, vec!["Apple", "Banana", "Cherry"]);
1074 }
1075
1076 #[test]
1077 fn test_items_indexed_with_provider() {
1078 let mut content = LazyListIntervalContent::new();
1079 let data = ["Apple", "Banana", "Cherry"];
1080 let items_visited = Rc::new(RefCell::new(Vec::new()));
1081 let items_clone = items_visited.clone();
1082
1083 content.items_indexed_with_provider(
1084 data.len(),
1085 move |index| data.get(index).copied(),
1086 move |index, item: &str| {
1087 items_clone.borrow_mut().push((index, item.to_string()));
1088 },
1089 );
1090
1091 assert_eq!(content.item_count(), 3);
1092
1093 for i in 0..3 {
1094 content.invoke_content(i);
1095 }
1096
1097 let visited = items_visited.borrow();
1098 assert_eq!(
1099 *visited,
1100 vec![
1101 (0, "Apple".to_string()),
1102 (1, "Banana".to_string()),
1103 (2, "Cherry".to_string())
1104 ]
1105 );
1106 }
1107
1108 #[test]
1109 fn test_large_list_cache_works() {
1110 // Test that get_index_by_slot_id uses O(1) cache for lists > 10k items
1111 // (previously this would fall back to O(N) linear search)
1112 let mut content = LazyListIntervalContent::new();
1113
1114 // Create a list with 20,000 items (above the old 10k limit)
1115 content.items(
1116 20_000,
1117 Some(|i| (i * 7) as u64), // Unique keys
1118 None::<fn(usize) -> u64>,
1119 |_| {},
1120 );
1121
1122 // Verify lookup works for item near the end
1123 let key_19999 = content.get_key(19999);
1124 assert_eq!(key_19999, LazyLayoutKey::User(19999 * 7));
1125
1126 // Verify get_index_by_slot_id finds the correct index (should be O(1) now)
1127 let slot_id = key_19999.to_slot_id();
1128 let found_index = content.get_index_by_slot_id(slot_id);
1129 assert_eq!(found_index, Some(19999));
1130
1131 // Also test a middle item
1132 let key_10000 = content.get_key(10000);
1133 let slot_id_mid = key_10000.to_slot_id();
1134 let found_mid = content.get_index_by_slot_id(slot_id_mid);
1135 assert_eq!(found_mid, Some(10000));
1136 }
1137}