1use std::cell::RefCell;
13use std::rc::Rc;
14use std::sync::OnceLock;
15
16use cranpose_core::{MutableState, NodeId};
17use cranpose_macros::composable;
18
19use super::nearest_range::NearestRangeState;
20use super::prefetch::{PrefetchScheduler, PrefetchStrategy};
21
static LAZY_MEASURE_TELEMETRY_ENABLED: OnceLock<bool> = OnceLock::new();

/// Returns `true` when the `CRANPOSE_LAZY_MEASURE_TELEMETRY` environment
/// variable is set (to any value, including empty).
///
/// The environment is consulted exactly once; the result is cached for the
/// lifetime of the process in [`LAZY_MEASURE_TELEMETRY_ENABLED`].
fn lazy_measure_telemetry_enabled() -> bool {
    let enabled = LAZY_MEASURE_TELEMETRY_ENABLED.get_or_init(|| {
        // `var_os` (not `var`) so a present-but-non-unicode value still counts.
        std::env::var_os("CRANPOSE_LAZY_MEASURE_TELEMETRY").is_some()
    });
    *enabled
}
28
/// Upper bound on the scroll backlog that may accumulate between measure
/// passes; `dispatch_scroll_delta` clamps the pending delta to ±this value.
const MAX_PENDING_SCROLL_DELTA: f32 = 2000.0;
/// Maximum number of per-item measured sizes retained in the LRU size cache.
const ITEM_SIZE_CACHE_CAPACITY: usize = 100;
31
/// Coherent snapshot of all scroll-related inputs consumed by a single
/// measure pass (produced by `LazyListState::begin_measure_pass`, which also
/// clears the pending inputs it captures).
#[derive(Clone, Copy, Debug, PartialEq)]
pub(crate) struct LazyListMeasureStateSnapshot {
    /// Index of the first visible item at the start of the pass.
    pub(crate) first_visible_item_index: usize,
    /// Scroll offset within the first visible item.
    pub(crate) first_visible_item_scroll_offset: f32,
    /// Accumulated scroll delta taken (and consumed) from the state.
    pub(crate) pending_scroll_delta: f32,
    /// Pending programmatic `(index, offset)` scroll request, if any.
    pub(crate) pending_scroll_to: Option<(usize, f32)>,
    /// Running average of measured item sizes, for estimating unmeasured items.
    pub(crate) average_item_size: f32,
}
40
/// Diagnostic counters describing item composition and reuse for a lazy layout.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct LazyLayoutStats {
    /// Number of item slots currently in active use.
    pub items_in_use: usize,

    /// Number of idle item slots available in the reuse pool.
    pub items_in_pool: usize,

    /// Total number of item compositions performed so far.
    pub total_composed: usize,

    /// How many of those compositions reused a pooled item.
    pub reuse_count: usize,
}
58
/// Scroll anchor of a lazy list: which item is first visible and how far past
/// its start the viewport is scrolled. Backed by `MutableState` handles, so
/// the struct itself is a cheap `Copy` handle into composition-owned state.
#[derive(Clone, Copy)]
pub struct LazyListScrollPosition {
    /// Index of the first visible item (reactive).
    index: MutableState<usize>,
    /// Offset scrolled past the start of the first visible item (reactive).
    scroll_offset: MutableState<f32>,
    /// Shared non-reactive bookkeeping (keyed anchor + nearest range).
    inner: MutableState<Rc<RefCell<ScrollPositionInner>>>,
}
79
/// Non-reactive bookkeeping behind [`LazyListScrollPosition`].
struct ScrollPositionInner {
    /// Key of the first visible item after the last measure pass; used to
    /// re-anchor the position when the data set changes. Cleared (`None`)
    /// after an explicit scroll request.
    last_known_first_item_key: Option<u64>,
    /// Tracks the item range nearest to the current scroll position.
    nearest_range_state: NearestRangeState,
}
88
impl LazyListScrollPosition {
    /// True only while all three backing states are still owned by a live
    /// composition. Mutating methods bail out early once this is false.
    fn is_alive(&self) -> bool {
        self.index.is_alive() && self.scroll_offset.is_alive() && self.inner.is_alive()
    }

    /// Panic-free read of the first visible index; 0 after release.
    fn current_index(&self) -> usize {
        self.index.try_value().unwrap_or(0)
    }

    /// Panic-free read of the scroll offset; 0.0 after release.
    fn current_scroll_offset(&self) -> f32 {
        self.scroll_offset.try_value().unwrap_or(0.0)
    }

    /// Index of the first visible item, read via `get()` (the subscribing
    /// read, per this codebase's `get`/`get_non_reactive` naming); returns 0
    /// once the backing state has been released.
    pub fn index(&self) -> usize {
        if !self.index.is_alive() {
            return 0;
        }
        self.index.get()
    }

    /// Scroll offset within the first visible item (subscribing read);
    /// returns 0.0 once the backing state has been released.
    pub fn scroll_offset(&self) -> f32 {
        if !self.scroll_offset.is_alive() {
            return 0.0;
        }
        self.scroll_offset.get()
    }

    /// Applies the result of a measure pass: remembers the first item's key
    /// (for later re-anchoring when the data set changes), refreshes the
    /// nearest range, and writes the reactive index/offset states only when
    /// they actually changed, to avoid spurious invalidations.
    pub(crate) fn update_from_measure_result(
        &self,
        first_visible_index: usize,
        first_visible_scroll_offset: f32,
        first_visible_item_key: Option<u64>,
    ) {
        if !self.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.last_known_first_item_key = first_visible_item_key;
            inner.nearest_range_state.update(first_visible_index);
        });

        // Only write on change so observers are not re-notified for
        // identical positions; offsets compare with a 0.001 epsilon.
        let old_index = self.index.get_non_reactive();
        if old_index != first_visible_index {
            self.index.set(first_visible_index);
        }
        let old_offset = self.scroll_offset.get_non_reactive();
        if (old_offset - first_visible_scroll_offset).abs() > 0.001 {
            self.scroll_offset.set(first_visible_scroll_offset);
        }
    }

    /// Explicit scroll request (e.g. `scroll_to_item`): forces the position
    /// and clears the remembered first-item key so the next data-set change
    /// does not re-anchor to the previously visible item.
    pub(crate) fn request_position_and_forget_last_known_key(
        &self,
        index: usize,
        scroll_offset: f32,
    ) {
        if !self.is_alive() {
            return;
        }
        if self.index.get_non_reactive() != index {
            self.index.set(index);
        }
        if (self.scroll_offset.get_non_reactive() - scroll_offset).abs() > 0.001 {
            self.scroll_offset.set(scroll_offset);
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.last_known_first_item_key = None;
            inner.nearest_range_state.update(index);
        });
    }

    /// Re-anchors the scroll position after the data set changed: if the
    /// previously first-visible item (identified by its remembered key) still
    /// exists, follow it to its new index; otherwise clamp the current index
    /// into the new bounds. Returns the index in effect afterwards
    /// (0 when the state has been released).
    pub(crate) fn update_if_first_item_moved<F>(
        &self,
        new_item_count: usize,
        find_by_key: F,
    ) -> usize
    where
        F: Fn(u64) -> Option<usize>,
    {
        if !self.index.is_alive() || !self.inner.is_alive() {
            return 0;
        }

        let current_index = self.index.get_non_reactive();
        let last_key = self
            .inner
            .try_with(|rc| rc.borrow().last_known_first_item_key)
            .flatten();

        let new_index = match last_key {
            // No key remembered: just clamp into the new item range.
            None => current_index.min(new_item_count.saturating_sub(1)),
            // Key remembered: follow the item if it still exists, else clamp.
            Some(key) => find_by_key(key)
                .unwrap_or_else(|| current_index.min(new_item_count.saturating_sub(1))),
        };

        if current_index != new_index {
            self.index.set(new_index);
            self.inner.with(|rc| {
                rc.borrow_mut().nearest_range_state.update(new_index);
            });
        }
        new_index
    }

    /// Item range near the current position (for keyed lookups); empty when
    /// the backing state has been released.
    pub fn nearest_range(&self) -> std::ops::Range<usize> {
        self.inner
            .try_with(|rc| rc.borrow().nearest_range_state.range())
            .unwrap_or(0..0)
    }
}
216
/// Observable state of a lazy list: scroll position, scroll bounds, stats,
/// and the mutable measurement bookkeeping shared with the measure pass.
/// Cheap to copy — every field is a handle into composition-owned state, and
/// every accessor degrades gracefully (defaults, no panics) after release.
#[derive(Clone, Copy)]
pub struct LazyListState {
    /// First visible item index/offset (reactive).
    scroll_position: LazyListScrollPosition,
    /// Whether the list can still scroll towards the end (reactive).
    can_scroll_forward_state: MutableState<bool>,
    /// Whether the list can scroll back towards the start (reactive).
    can_scroll_backward_state: MutableState<bool>,
    /// Reactive portion of the composition/reuse statistics.
    stats_state: MutableState<LazyLayoutStats>,
    /// Shared non-reactive bookkeeping: pending scroll input, layout info,
    /// size caches, prefetch scheduling, invalidation callbacks.
    inner: MutableState<Rc<RefCell<LazyListStateInner>>>,
}
265
/// Two `LazyListState` handles are equal when they share the same `inner`
/// state slot. The comparison is on the `MutableState` handle itself, so it
/// never dereferences a released state (see the
/// `equality_does_not_deref_released_inner_state` test).
impl PartialEq for LazyListState {
    fn eq(&self, other: &Self) -> bool {
        self.inner == other.inner
    }
}
274
/// Non-reactive bookkeeping behind [`LazyListState`], guarded by a `RefCell`.
struct LazyListStateInner {
    /// Scroll delta accumulated since the last measure pass, clamped to
    /// ±`MAX_PENDING_SCROLL_DELTA`.
    scroll_to_be_consumed: f32,

    /// Pending programmatic `(index, offset)` scroll request.
    pending_scroll_to_index: Option<(usize, f32)>,

    /// Layout produced by the last measure pass.
    layout_info: LazyListLayoutInfo,

    /// Registered invalidation callbacks, keyed by id.
    invalidate_callbacks: Vec<(u64, Rc<dyn Fn()>)>,
    /// Next id to hand out for a registered callback (starts at 1).
    next_callback_id: u64,

    /// Id of the currently active layout-invalidation callback, if any
    /// (at most one is active at a time).
    layout_invalidation_callback_id: Option<u64>,
    /// Node the active layout-invalidation callback is bound to.
    layout_invalidation_node_id: Option<NodeId>,

    /// Total item compositions performed (non-reactive counter).
    total_composed: usize,
    /// Item compositions that reused a pooled item.
    reuse_count: usize,

    /// Measured item sizes by index, bounded by `ITEM_SIZE_CACHE_CAPACITY`.
    item_size_cache: std::collections::HashMap<usize, f32>,
    /// Insertion/refresh order for `item_size_cache` eviction (front = oldest).
    item_size_lru: std::collections::VecDeque<usize>,

    /// Running average of measured sizes; one sample per *new* cache entry.
    average_item_size: f32,
    /// Number of samples folded into `average_item_size`.
    total_measured_items: usize,

    /// Queue of item indices scheduled for prefetch.
    prefetch_scheduler: PrefetchScheduler,

    /// Prefetch tuning parameters (see `PrefetchStrategy`).
    prefetch_strategy: PrefetchStrategy,

    /// Sign of the last scroll: +1.0 towards the end, -1.0 towards the start,
    /// 0.0 while no scroll has been recorded yet.
    last_scroll_direction: f32,
}
317
/// Remembers a [`LazyListState`] positioned at the start of the list.
/// Convenience wrapper over `remember_lazy_list_state_with_position(0, 0.0)`.
#[composable]
pub fn remember_lazy_list_state() -> LazyListState {
    remember_lazy_list_state_with_position(0, 0.0)
}
336
/// Remembers a [`LazyListState`] with an initial first-visible item index and
/// scroll offset. The initial values only apply on first composition; the
/// remembered state survives recomposition.
///
/// NOTE: the `useState` calls are positional composition slots — their order
/// must remain stable across recompositions.
#[composable]
pub fn remember_lazy_list_state_with_position(
    initial_first_visible_item_index: usize,
    initial_first_visible_item_scroll_offset: f32,
) -> LazyListState {
    let scroll_position = LazyListScrollPosition {
        index: cranpose_core::useState(|| initial_first_visible_item_index),
        scroll_offset: cranpose_core::useState(|| initial_first_visible_item_scroll_offset),
        inner: cranpose_core::useState(|| {
            Rc::new(RefCell::new(ScrollPositionInner {
                last_known_first_item_key: None,
                nearest_range_state: NearestRangeState::new(initial_first_visible_item_index),
            }))
        }),
    };

    // All non-reactive bookkeeping starts empty/zeroed; the average item size
    // starts from the module-wide default estimate.
    let inner = cranpose_core::useState(|| {
        Rc::new(RefCell::new(LazyListStateInner {
            scroll_to_be_consumed: 0.0,
            pending_scroll_to_index: None,
            layout_info: LazyListLayoutInfo::default(),
            invalidate_callbacks: Vec::new(),
            next_callback_id: 1,
            layout_invalidation_callback_id: None,
            layout_invalidation_node_id: None,
            total_composed: 0,
            reuse_count: 0,
            item_size_cache: std::collections::HashMap::new(),
            item_size_lru: std::collections::VecDeque::new(),
            average_item_size: super::DEFAULT_ITEM_SIZE_ESTIMATE,
            total_measured_items: 0,
            prefetch_scheduler: PrefetchScheduler::new(),
            prefetch_strategy: PrefetchStrategy::default(),
            last_scroll_direction: 0.0,
        }))
    });

    let can_scroll_forward_state = cranpose_core::useState(|| false);
    let can_scroll_backward_state = cranpose_core::useState(|| false);
    let stats_state = cranpose_core::useState(LazyLayoutStats::default);

    LazyListState {
        scroll_position,
        can_scroll_forward_state,
        can_scroll_backward_state,
        stats_state,
        inner,
    }
}
392
impl LazyListState {
    /// Stable identity pointer for the shared inner state (null once the
    /// state has been released). Suitable as a map key for per-state caches.
    pub fn inner_ptr(&self) -> *const () {
        self.inner
            .try_with(|rc| Rc::as_ptr(rc) as *const ())
            .unwrap_or(std::ptr::null())
    }

    /// Index of the first visible item (subscribing read; 0 after release).
    pub fn first_visible_item_index(&self) -> usize {
        self.scroll_position.index()
    }

    /// Scroll offset within the first visible item (subscribing read;
    /// 0.0 after release).
    pub fn first_visible_item_scroll_offset(&self) -> f32 {
        self.scroll_position.scroll_offset()
    }

    /// True when the list is scrolled away from the very start, or has any
    /// pending scroll input that would move it there. All reads are
    /// non-reactive, so callers are not subscribed to changes.
    pub fn is_scrolled_non_reactive(&self) -> bool {
        self.scroll_position.current_index() > 0
            || self.scroll_position.current_scroll_offset().abs() > 0.001
            || self
                .inner
                .try_with(|rc| {
                    let inner = rc.borrow();
                    inner.scroll_to_be_consumed.abs() > 0.001
                        || inner
                            .pending_scroll_to_index
                            .is_some_and(|(index, offset)| index > 0 || offset.abs() > 0.001)
                })
                .unwrap_or(false)
    }

    /// Clone of the last measured layout; default (empty) when released.
    pub fn layout_info(&self) -> LazyListLayoutInfo {
        self.inner
            .try_with(|rc| rc.borrow().layout_info.clone())
            .unwrap_or_default()
    }

    /// Combined statistics: the pool counters come from the reactive state
    /// (subscribing read), while the monotonic composition counters come from
    /// the non-reactive inner state. Defaults when the state is released.
    pub fn stats(&self) -> LazyLayoutStats {
        if !self.stats_state.is_alive() || !self.inner.is_alive() {
            return LazyLayoutStats::default();
        }
        let reactive = self.stats_state.get();
        let (total_composed, reuse_count) = self.inner.with(|rc| {
            let inner = rc.borrow();
            (inner.total_composed, inner.reuse_count)
        });
        LazyLayoutStats {
            items_in_use: reactive.items_in_use,
            items_in_pool: reactive.items_in_pool,
            total_composed,
            reuse_count,
        }
    }

    /// Publishes pool statistics. The reactive state is rewritten only when
    /// `items_in_use` grows, or shrinks by more than one — single-item
    /// shrinkage is absorbed as jitter to avoid invalidation churn.
    ///
    /// NOTE(review): a change in `items_in_pool` alone (with `items_in_use`
    /// unchanged) never refreshes the reactive stats.
    pub fn update_stats(&self, items_in_use: usize, items_in_pool: usize) {
        if !self.stats_state.is_alive() || !self.inner.is_alive() {
            return;
        }

        let current = self.stats_state.get_non_reactive();

        let should_update_reactive = if items_in_use > current.items_in_use {
            true
        } else if items_in_use < current.items_in_use {
            // Shrinking by a single item is treated as measurement jitter.
            current.items_in_use - items_in_use > 1
        } else {
            false
        };

        if should_update_reactive {
            self.stats_state.set(LazyLayoutStats {
                items_in_use,
                items_in_pool,
                ..current
            });
        }
    }

    /// Counts one item composition; `was_reused` additionally bumps the
    /// reuse counter.
    pub fn record_composition(&self, was_reused: bool) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.total_composed += 1;
            if was_reused {
                inner.reuse_count += 1;
            }
        });
    }

    /// Records the direction of the latest scroll input. The sign is flipped:
    /// a negative delta counts as forward (+1.0) movement — see the
    /// `negative_scroll_delta_prefetches_forward_items` test. Deltas inside
    /// the 0.001 dead-zone are ignored and keep the previous direction.
    pub fn record_scroll_direction(&self, delta: f32) {
        if delta.abs() > 0.001 {
            if !self.inner.is_alive() {
                return;
            }
            self.inner.with(|rc| {
                rc.borrow_mut().last_scroll_direction = -delta.signum();
            });
        }
    }

    /// Refreshes the prefetch queue from the visible range, total item count,
    /// last recorded scroll direction, and the configured strategy.
    pub fn update_prefetch_queue(
        &self,
        first_visible_index: usize,
        last_visible_index: usize,
        total_items: usize,
    ) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            let direction = inner.last_scroll_direction;
            // Clone the strategy so the scheduler can be borrowed mutably
            // while the strategy is passed by reference.
            let strategy = inner.prefetch_strategy.clone();
            inner.prefetch_scheduler.update(
                first_visible_index,
                last_visible_index,
                total_items,
                direction,
                &strategy,
            );
        });
    }

    /// Drains and returns all currently queued prefetch indices
    /// (empty after release).
    pub fn take_prefetch_indices(&self) -> Vec<usize> {
        self.inner
            .try_with(|rc| {
                let mut inner = rc.borrow_mut();
                let mut indices = Vec::new();
                while let Some(idx) = inner.prefetch_scheduler.next_prefetch() {
                    indices.push(idx);
                }
                indices
            })
            .unwrap_or_default()
    }

    /// Requests a programmatic scroll to `index` with the given offset within
    /// that item. The request is stored for the next measure pass, the scroll
    /// position is updated eagerly (forgetting the keyed anchor), and the
    /// layout is invalidated.
    pub fn scroll_to_item(&self, index: usize, scroll_offset: f32) {
        if !self.inner.is_alive() {
            return;
        }
        if lazy_measure_telemetry_enabled() {
            log::warn!(
                "[lazy-measure-telemetry] scroll_to_item request index={} offset={:.2}",
                index,
                scroll_offset
            );
        }
        self.inner.with(|rc| {
            rc.borrow_mut().pending_scroll_to_index = Some((index, scroll_offset));
        });

        self.scroll_position
            .request_position_and_forget_last_known_key(index, scroll_offset);

        self.invalidate();
    }

    /// Accumulates a scroll delta for the next measure pass and returns how
    /// much of it was consumed.
    ///
    /// - When the delta pushes against a known edge (bounds known, i.e. the
    ///   layout has items, and the corresponding `can_scroll_*` flag is
    ///   false), nothing is consumed and any stale backlog in the same
    ///   direction is dropped.
    /// - A direction change *replaces* the backlog instead of summing with
    ///   it, so stale momentum does not fight the new input.
    /// - The backlog is clamped to ±`MAX_PENDING_SCROLL_DELTA`.
    ///
    /// Invalidation callbacks fire only when the pending backlog actually
    /// changed (beyond the 0.001 epsilon).
    pub fn dispatch_scroll_delta(&self, delta: f32) -> f32 {
        if !self.inner.is_alive() {
            return 0.0;
        }
        // Bounds are only meaningful once a measure pass has seen items.
        let has_scroll_bounds = self
            .inner
            .with(|rc| rc.borrow().layout_info.total_items_count > 0);
        let pushing_forward = delta < -0.001;
        let pushing_backward = delta > 0.001;
        let can_scroll_forward = self.can_scroll_forward_state.is_alive()
            && self.can_scroll_forward_state.get_non_reactive();
        let can_scroll_backward = self.can_scroll_backward_state.is_alive()
            && self.can_scroll_backward_state.get_non_reactive();
        let blocked_by_bounds = has_scroll_bounds
            && ((pushing_forward && !can_scroll_forward)
                || (pushing_backward && !can_scroll_backward));

        if blocked_by_bounds {
            let should_invalidate = self.inner.with(|rc| {
                let mut inner = rc.borrow_mut();
                let pending_before = inner.scroll_to_be_consumed;
                // Drop any backlog pointing into the blocked edge.
                if pending_before.abs() > 0.001 && pending_before.signum() == delta.signum() {
                    inner.scroll_to_be_consumed = 0.0;
                }
                if lazy_measure_telemetry_enabled() {
                    log::warn!(
                        "[lazy-measure-telemetry] dispatch_scroll_delta blocked_by_bounds delta={:.2} pending_before={:.2} pending_after={:.2}",
                        delta,
                        pending_before,
                        inner.scroll_to_be_consumed
                    );
                }
                (inner.scroll_to_be_consumed - pending_before).abs() > 0.001
            });
            if should_invalidate {
                self.invalidate();
            }
            return 0.0;
        }

        let should_invalidate = self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            let pending_before = inner.scroll_to_be_consumed;
            // NOTE(review): `pending` duplicates `pending_before`; a single
            // binding would suffice.
            let pending = inner.scroll_to_be_consumed;
            let reverse_input = pending.abs() > 0.001
                && delta.abs() > 0.001
                && pending.signum() != delta.signum();
            if reverse_input {
                if lazy_measure_telemetry_enabled() {
                    log::warn!(
                        "[lazy-measure-telemetry] dispatch_scroll_delta direction_change pending={:.2} new_delta={:.2}",
                        pending,
                        delta
                    );
                }
                // Direction change: discard the stale backlog.
                inner.scroll_to_be_consumed = delta;
            } else {
                inner.scroll_to_be_consumed += delta;
            }
            inner.scroll_to_be_consumed = inner
                .scroll_to_be_consumed
                .clamp(-MAX_PENDING_SCROLL_DELTA, MAX_PENDING_SCROLL_DELTA);
            if lazy_measure_telemetry_enabled() {
                log::warn!(
                    "[lazy-measure-telemetry] dispatch_scroll_delta delta={:.2} pending={:.2}",
                    delta,
                    inner.scroll_to_be_consumed
                );
            }
            (inner.scroll_to_be_consumed - pending_before).abs() > 0.001
        });
        if should_invalidate {
            self.invalidate();
        }
        delta
    }

    /// Current pending scroll backlog without consuming it (0.0 after release).
    pub fn peek_scroll_delta(&self) -> f32 {
        self.inner
            .try_with(|rc| rc.borrow().scroll_to_be_consumed)
            .unwrap_or(0.0)
    }

    /// Takes a coherent snapshot of all measure inputs, consuming the pending
    /// scroll delta and the pending scroll-to request in the same borrow so a
    /// concurrent mutation cannot tear the snapshot.
    pub(crate) fn begin_measure_pass(&self) -> LazyListMeasureStateSnapshot {
        let (pending_scroll_delta, pending_scroll_to, average_item_size) = self
            .inner
            .try_with(|rc| {
                let mut inner = rc.borrow_mut();
                let pending_scroll_delta = inner.scroll_to_be_consumed;
                inner.scroll_to_be_consumed = 0.0;
                let pending_scroll_to = inner.pending_scroll_to_index.take();
                (
                    pending_scroll_delta,
                    pending_scroll_to,
                    inner.average_item_size,
                )
            })
            .unwrap_or((0.0, None, super::DEFAULT_ITEM_SIZE_ESTIMATE));

        LazyListMeasureStateSnapshot {
            first_visible_item_index: self.scroll_position.current_index(),
            first_visible_item_scroll_offset: self.scroll_position.current_scroll_offset(),
            pending_scroll_delta,
            pending_scroll_to,
            average_item_size,
        }
    }

    /// Folds one measured size into the running average (incremental mean).
    fn record_item_size_sample(inner: &mut LazyListStateInner, size: f32) {
        inner.total_measured_items += 1;
        let n = inner.total_measured_items as f32;
        inner.average_item_size = inner.average_item_size * ((n - 1.0) / n) + size / n;
    }

    /// Inserts or refreshes a cached item size. Returns `true` only when the
    /// entry is new — refreshes just update the value and move the index to
    /// the back of the LRU — so callers know whether to record an average
    /// sample.
    fn insert_item_size(inner: &mut LazyListStateInner, index: usize, size: f32) -> bool {
        use std::collections::hash_map::Entry;

        if let Entry::Occupied(mut entry) = inner.item_size_cache.entry(index) {
            entry.insert(size);
            // Move the refreshed index to the back of the eviction order.
            if let Some(pos) = inner
                .item_size_lru
                .iter()
                .position(|&cached| cached == index)
            {
                inner.item_size_lru.remove(pos);
            }
            inner.item_size_lru.push_back(index);
            return false;
        }

        // Evict the oldest entries until there is room; the `remove` check
        // skips LRU indices that no longer have a live cache entry.
        while inner.item_size_cache.len() >= ITEM_SIZE_CACHE_CAPACITY {
            if let Some(oldest) = inner.item_size_lru.pop_front() {
                if inner.item_size_cache.remove(&oldest).is_some() {
                    break;
                }
            } else {
                break;
            }
        }

        inner.item_size_cache.insert(index, size);
        inner.item_size_lru.push_back(index);
        true
    }

    /// Caches one measured item size; only new entries contribute to the
    /// running average.
    pub fn cache_item_size(&self, index: usize, size: f32) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            if Self::insert_item_size(&mut inner, index, size) {
                Self::record_item_size_sample(&mut inner, size);
            }
        });
    }

    /// Bulk variant of `cache_item_size`; returns the running average item
    /// size afterwards (the default estimate when the state is released).
    pub fn cache_item_sizes<I>(&self, sizes: I) -> f32
    where
        I: IntoIterator<Item = (usize, f32)>,
    {
        if !self.inner.is_alive() {
            return super::DEFAULT_ITEM_SIZE_ESTIMATE;
        }

        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            for (index, size) in sizes {
                if Self::insert_item_size(&mut inner, index, size) {
                    Self::record_item_size_sample(&mut inner, size);
                }
            }
            inner.average_item_size
        })
    }

    /// Cached measured size for `index`, if still in the LRU cache.
    pub fn get_cached_size(&self, index: usize) -> Option<f32> {
        self.inner
            .try_with(|rc| rc.borrow().item_size_cache.get(&index).copied())
            .flatten()
    }

    /// Running average measured item size; the default estimate when no
    /// samples exist or the state has been released.
    pub fn average_item_size(&self) -> f32 {
        self.inner
            .try_with(|rc| rc.borrow().average_item_size)
            .unwrap_or(super::DEFAULT_ITEM_SIZE_ESTIMATE)
    }

    /// Item range near the current position; empty after release.
    pub fn nearest_range(&self) -> std::ops::Range<usize> {
        self.scroll_position.nearest_range()
    }

    /// Applies a measured position without a keyed anchor.
    pub(crate) fn update_scroll_position(
        &self,
        first_visible_item_index: usize,
        first_visible_item_scroll_offset: f32,
    ) {
        self.scroll_position.update_from_measure_result(
            first_visible_item_index,
            first_visible_item_scroll_offset,
            None,
        );
    }

    /// Applies a measured position, remembering the first item's key so the
    /// position can follow the item across data-set changes.
    pub(crate) fn update_scroll_position_with_key(
        &self,
        first_visible_item_index: usize,
        first_visible_item_scroll_offset: f32,
        first_visible_item_key: u64,
    ) {
        self.scroll_position.update_from_measure_result(
            first_visible_item_index,
            first_visible_item_scroll_offset,
            Some(first_visible_item_key),
        );
    }

    /// Re-anchors the position after a data-set change; see
    /// `LazyListScrollPosition::update_if_first_item_moved`.
    pub fn update_scroll_position_if_item_moved<F>(
        &self,
        new_item_count: usize,
        get_index_by_key: F,
    ) -> usize
    where
        F: Fn(u64) -> Option<usize>,
    {
        self.scroll_position
            .update_if_first_item_moved(new_item_count, get_index_by_key)
    }

    /// Stores the layout produced by a measure pass.
    pub(crate) fn update_layout_info(&self, info: LazyListLayoutInfo) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| rc.borrow_mut().layout_info = info);
    }

    /// Whether the list can scroll towards the end (subscribing read;
    /// false after release).
    pub fn can_scroll_forward(&self) -> bool {
        if !self.can_scroll_forward_state.is_alive() {
            return false;
        }
        self.can_scroll_forward_state.get()
    }

    /// Whether the list can scroll back towards the start (subscribing read;
    /// false after release).
    pub fn can_scroll_backward(&self) -> bool {
        if !self.can_scroll_backward_state.is_alive() {
            return false;
        }
        self.can_scroll_backward_state.get()
    }

    /// Recomputes the `can_scroll_*` flags from the last layout. Forward is
    /// possible while items remain after the last visible one, or while the
    /// last visible item overflows the viewport end; backward whenever the
    /// position is past the very start. Flags are only rewritten on change.
    pub(crate) fn update_scroll_bounds(&self) {
        if !self.inner.is_alive()
            || !self.can_scroll_forward_state.is_alive()
            || !self.can_scroll_backward_state.is_alive()
        {
            return;
        }
        let can_forward = self.inner.with(|rc| {
            let inner = rc.borrow();
            let info = &inner.layout_info;
            let viewport_end = info.viewport_size - info.after_content_padding;
            if let Some(last_visible) = info.visible_items_info.last() {
                last_visible.index < info.total_items_count.saturating_sub(1)
                    || (last_visible.offset + last_visible.size) > viewport_end
            } else {
                // No visible items: nothing to scroll towards.
                false
            }
        });

        let can_backward = self.scroll_position.current_index() > 0
            || self.scroll_position.current_scroll_offset() > 0.0;

        if self.can_scroll_forward_state.get_non_reactive() != can_forward {
            self.can_scroll_forward_state.set(can_forward);
        }
        if self.can_scroll_backward_state.get_non_reactive() != can_backward {
            self.can_scroll_backward_state.set(can_backward);
        }
    }

    /// Registers an invalidation callback; returns its id (0 after release).
    pub fn add_invalidate_callback(&self, callback: Rc<dyn Fn()>) -> u64 {
        if !self.inner.is_alive() {
            return 0;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            let id = inner.next_callback_id;
            inner.next_callback_id += 1;
            inner.invalidate_callbacks.push((id, callback));
            id
        })
    }

    /// Registers the layout-invalidation callback for `node_id`, replacing
    /// any previously registered layout callback (at most one is active at a
    /// time). Returns the new callback id, or `None` after release.
    pub fn try_register_layout_callback(
        &self,
        node_id: NodeId,
        callback: Rc<dyn Fn()>,
    ) -> Option<u64> {
        if !self.inner.is_alive() {
            return None;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            // Drop the previously active layout callback, if any.
            if let Some(existing_id) = inner.layout_invalidation_callback_id {
                inner
                    .invalidate_callbacks
                    .retain(|(cb_id, _)| *cb_id != existing_id);
            }
            let id = inner.next_callback_id;
            inner.next_callback_id += 1;
            inner.invalidate_callbacks.push((id, callback));
            inner.layout_invalidation_callback_id = Some(id);
            inner.layout_invalidation_node_id = Some(node_id);
            Some(id)
        })
    }

    /// Removes a callback by id. Stale ids are a no-op, so a disposer holding
    /// a replaced id cannot remove the currently active layout callback; the
    /// layout bookkeeping is cleared only when the removed id was active.
    pub fn remove_invalidate_callback(&self, id: u64) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.invalidate_callbacks.retain(|(cb_id, _)| *cb_id != id);
            if inner.layout_invalidation_callback_id == Some(id) {
                inner.layout_invalidation_callback_id = None;
                inner.layout_invalidation_node_id = None;
            }
        });
    }

    /// Invokes all registered invalidation callbacks. The callbacks are
    /// cloned out first so they can re-enter this state without the
    /// `RefCell` borrow being held.
    fn invalidate(&self) {
        if !self.inner.is_alive() {
            return;
        }
        let callbacks: Vec<_> = self.inner.with(|rc| {
            rc.borrow()
                .invalidate_callbacks
                .iter()
                .map(|(_, cb)| Rc::clone(cb))
                .collect()
        });

        for callback in callbacks {
            callback();
        }
    }
}
1026
/// Results of the most recent lazy-list measure pass.
#[derive(Clone, Default, Debug)]
pub struct LazyListLayoutInfo {
    /// Items currently visible, in order.
    pub visible_items_info: Vec<LazyListItemInfo>,

    /// Total number of items in the data set.
    pub total_items_count: usize,

    /// Viewport size as provided by the constraints, before adjustment.
    /// NOTE(review): inferred from the name — confirm at the measure site.
    pub raw_viewport_size: f32,

    /// True when the viewport constraint was unbounded — TODO confirm against
    /// the measure implementation.
    pub is_infinite_viewport: bool,

    /// Effective viewport size used for layout; scroll-bound checks compare
    /// against `viewport_size - after_content_padding`.
    pub viewport_size: f32,

    /// Offset at which the viewport starts.
    pub viewport_start_offset: f32,

    /// Offset at which the viewport ends.
    pub viewport_end_offset: f32,

    /// Padding before the first item's content.
    pub before_content_padding: f32,

    /// Padding after the last item's content.
    pub after_content_padding: f32,
}
1057
/// Placement information for a single visible item.
#[derive(Clone, Debug)]
pub struct LazyListItemInfo {
    /// Item index in the data set.
    pub index: usize,

    /// Stable key of the item.
    pub key: u64,

    /// Scroll-axis offset of the item within the viewport.
    pub offset: f32,

    /// Scroll-axis size of the item.
    pub size: f32,
}
1073
#[cfg(test)]
pub mod test_helpers {
    //! Constructors that build a [`LazyListState`] directly — outside of a
    //! composition — so unit tests can exercise the state without rendering.
    use super::*;
    use cranpose_core::{DefaultScheduler, Runtime};
    use std::sync::Arc;

    /// Runs `f` with a live runtime so state creation succeeds.
    pub fn with_test_runtime<T>(f: impl FnOnce() -> T) -> T {
        let _runtime = Runtime::new(Arc::new(DefaultScheduler));
        f()
    }

    /// State positioned at the start of the list.
    pub fn new_lazy_list_state() -> LazyListState {
        new_lazy_list_state_with_position(0, 0.0)
    }

    /// State positioned at the given index/offset. Mirrors
    /// `remember_lazy_list_state_with_position`, but uses `mutableStateOf`
    /// instead of composition slots.
    pub fn new_lazy_list_state_with_position(
        initial_first_visible_item_index: usize,
        initial_first_visible_item_scroll_offset: f32,
    ) -> LazyListState {
        let scroll_position = LazyListScrollPosition {
            index: cranpose_core::mutableStateOf(initial_first_visible_item_index),
            scroll_offset: cranpose_core::mutableStateOf(initial_first_visible_item_scroll_offset),
            inner: cranpose_core::mutableStateOf(Rc::new(RefCell::new(ScrollPositionInner {
                last_known_first_item_key: None,
                nearest_range_state: NearestRangeState::new(initial_first_visible_item_index),
            }))),
        };

        let inner = cranpose_core::mutableStateOf(Rc::new(RefCell::new(LazyListStateInner {
            scroll_to_be_consumed: 0.0,
            pending_scroll_to_index: None,
            layout_info: LazyListLayoutInfo::default(),
            invalidate_callbacks: Vec::new(),
            next_callback_id: 1,
            layout_invalidation_callback_id: None,
            layout_invalidation_node_id: None,
            total_composed: 0,
            reuse_count: 0,
            item_size_cache: std::collections::HashMap::new(),
            item_size_lru: std::collections::VecDeque::new(),
            average_item_size: super::super::DEFAULT_ITEM_SIZE_ESTIMATE,
            total_measured_items: 0,
            prefetch_scheduler: PrefetchScheduler::new(),
            prefetch_strategy: PrefetchStrategy::default(),
            last_scroll_direction: 0.0,
        })));

        let can_scroll_forward_state = cranpose_core::mutableStateOf(false);
        let can_scroll_backward_state = cranpose_core::mutableStateOf(false);
        let stats_state = cranpose_core::mutableStateOf(LazyLayoutStats::default());

        LazyListState {
            scroll_position,
            can_scroll_forward_state,
            can_scroll_backward_state,
            stats_state,
            inner,
        }
    }
}
1144
#[cfg(test)]
mod tests {
    use super::test_helpers::{
        new_lazy_list_state, new_lazy_list_state_with_position, with_test_runtime,
    };
    use super::{LazyListLayoutInfo, LazyListState};
    use cranpose_core::{location_key, Composition, MemoryApplier};
    use std::cell::Cell;
    use std::rc::Rc;

    /// Marks both scroll directions available so deltas are never rejected
    /// by the bounds check.
    fn enable_bidirectional_scroll(state: &LazyListState) {
        state.can_scroll_forward_state.set(true);
        state.can_scroll_backward_state.set(true);
    }

    /// Gives the state a non-empty layout so `dispatch_scroll_delta` treats
    /// the scroll bounds as known.
    fn mark_scroll_bounds_known(state: &LazyListState) {
        state.update_layout_info(LazyListLayoutInfo {
            total_items_count: 10,
            ..Default::default()
        });
    }

    #[test]
    fn dispatch_scroll_delta_accumulates_same_direction() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);

            state.dispatch_scroll_delta(-12.0);
            state.dispatch_scroll_delta(-8.0);

            // Same-direction deltas sum; the measure pass consumes them once.
            assert!((state.peek_scroll_delta() + 20.0).abs() < 0.001);
            let snapshot = state.begin_measure_pass();
            assert!((snapshot.pending_scroll_delta + 20.0).abs() < 0.001);
            assert_eq!(state.begin_measure_pass().pending_scroll_delta, 0.0);
        });
    }

    #[test]
    fn dispatch_scroll_delta_drops_stale_backlog_on_direction_change() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);

            state.dispatch_scroll_delta(-120.0);
            state.dispatch_scroll_delta(-30.0);
            assert!((state.peek_scroll_delta() + 150.0).abs() < 0.001);

            // Reversing direction replaces the backlog instead of summing.
            state.dispatch_scroll_delta(18.0);

            assert!((state.peek_scroll_delta() - 18.0).abs() < 0.001);
            let snapshot = state.begin_measure_pass();
            assert!((snapshot.pending_scroll_delta - 18.0).abs() < 0.001);
            assert_eq!(state.begin_measure_pass().pending_scroll_delta, 0.0);
        });
    }

    #[test]
    fn dispatch_scroll_delta_clamps_pending_backlog() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);

            state.dispatch_scroll_delta(-1_500.0);
            state.dispatch_scroll_delta(-1_500.0);
            assert!((state.peek_scroll_delta() + super::MAX_PENDING_SCROLL_DELTA).abs() < 0.001);

            state.dispatch_scroll_delta(3_000.0);
            assert!((state.peek_scroll_delta() - super::MAX_PENDING_SCROLL_DELTA).abs() < 0.001);
        });
    }

    #[test]
    fn dispatch_scroll_delta_skips_invalidate_when_clamped_value_is_unchanged() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);
            let invalidations = Rc::new(Cell::new(0u32));
            let invalidations_clone = Rc::clone(&invalidations);
            state.add_invalidate_callback(Rc::new(move || {
                invalidations_clone.set(invalidations_clone.get() + 1);
            }));

            state.dispatch_scroll_delta(-3_000.0);
            assert_eq!(invalidations.get(), 1);
            assert!((state.peek_scroll_delta() + super::MAX_PENDING_SCROLL_DELTA).abs() < 0.001);

            // Already clamped at the limit: backlog unchanged, no invalidation.
            state.dispatch_scroll_delta(-100.0);
            assert_eq!(invalidations.get(), 1);

            // Direction change resets the backlog and does invalidate.
            state.dispatch_scroll_delta(100.0);
            assert_eq!(invalidations.get(), 2);
        });
    }

    #[test]
    fn begin_measure_pass_takes_coherent_snapshot_and_consumes_pending_inputs() {
        with_test_runtime(|| {
            let state = new_lazy_list_state_with_position(3, 12.0);
            state.dispatch_scroll_delta(-20.0);
            state.inner.with(|rc| {
                rc.borrow_mut().pending_scroll_to_index = Some((8, 4.0));
            });

            let snapshot = state.begin_measure_pass();

            assert_eq!(snapshot.first_visible_item_index, 3);
            assert!((snapshot.first_visible_item_scroll_offset - 12.0).abs() < 0.001);
            assert!((snapshot.pending_scroll_delta + 20.0).abs() < 0.001);
            assert_eq!(snapshot.pending_scroll_to, Some((8, 4.0)));
            // Both pending inputs were consumed by the snapshot.
            assert_eq!(state.peek_scroll_delta(), 0.0);
            assert_eq!(state.begin_measure_pass().pending_scroll_to, None);
        });
    }

    #[test]
    fn item_size_cache_refresh_keeps_recent_entry_and_evicts_oldest_live_entry() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            for index in 0..super::ITEM_SIZE_CACHE_CAPACITY {
                state.cache_item_size(index, index as f32 + 10.0);
            }

            // Refreshing index 0 moves it to the back of the LRU, so the
            // overflowing insert evicts index 1 instead.
            state.cache_item_size(0, 999.0);
            state.cache_item_size(super::ITEM_SIZE_CACHE_CAPACITY, 123.0);

            assert_eq!(state.get_cached_size(0), Some(999.0));
            assert_eq!(state.get_cached_size(1), None);
            assert_eq!(
                state.get_cached_size(super::ITEM_SIZE_CACHE_CAPACITY),
                Some(123.0),
            );
        });
    }

    #[test]
    fn cache_item_sizes_updates_average_only_for_new_entries() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();

            // (0, 12.0) refreshes an existing entry: value updates, but the
            // average only reflects the two *new* samples 10.0 and 20.0.
            let average = state.cache_item_sizes([(0, 10.0), (1, 20.0), (0, 12.0)]);

            assert_eq!(state.get_cached_size(0), Some(12.0));
            assert_eq!(state.get_cached_size(1), Some(20.0));
            assert!((average - 15.0).abs() < 0.001);
        });
    }

    #[test]
    fn layout_callback_can_be_registered_again_after_removal() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            let first_node: cranpose_core::NodeId = 1;
            let second_node: cranpose_core::NodeId = 2;

            let first_id = state
                .try_register_layout_callback(first_node, Rc::new(|| {}))
                .expect("first layout callback should register");
            let duplicate_id = state
                .try_register_layout_callback(first_node, Rc::new(|| {}))
                .expect("duplicate register should replace with a fresh callback id");
            assert_eq!(
                state
                    .inner
                    .with(|rc| rc.borrow().layout_invalidation_callback_id),
                Some(duplicate_id),
                "duplicate registration should become the active callback",
            );
            assert_ne!(
                first_id, duplicate_id,
                "duplicate registration should replace the old callback id",
            );

            state.remove_invalidate_callback(first_id);

            let second_id = state
                .try_register_layout_callback(second_node, Rc::new(|| {}))
                .expect("layout callback should register again after removal");
            assert_ne!(first_id, second_id);
        });
    }

    #[test]
    fn layout_callback_rebinds_when_node_id_changes() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            let first_node: cranpose_core::NodeId = 11;
            let second_node: cranpose_core::NodeId = 22;

            let first_id = state
                .try_register_layout_callback(first_node, Rc::new(|| {}))
                .expect("first layout callback should register");

            let second_id = state
                .try_register_layout_callback(second_node, Rc::new(|| {}))
                .expect("layout callback should rebind to a new node");

            assert_ne!(first_id, second_id);
        });
    }

    #[test]
    fn stale_layout_callback_disposer_cannot_remove_replaced_same_node_callback() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            let node_id: cranpose_core::NodeId = 7;
            let first_hits = Rc::new(Cell::new(0u32));
            let second_hits = Rc::new(Cell::new(0u32));

            let first_id = state
                .try_register_layout_callback(
                    node_id,
                    Rc::new({
                        let first_hits = Rc::clone(&first_hits);
                        move || first_hits.set(first_hits.get() + 1)
                    }),
                )
                .expect("first layout callback should register");

            let second_id = state
                .try_register_layout_callback(
                    node_id,
                    Rc::new({
                        let second_hits = Rc::clone(&second_hits);
                        move || second_hits.set(second_hits.get() + 1)
                    }),
                )
                .expect("same-node registration should replace the active callback");

            assert_ne!(first_id, second_id);

            // Removing the stale id must not touch the replacement callback.
            state.remove_invalidate_callback(first_id);
            state.dispatch_scroll_delta(-12.0);

            assert_eq!(
                first_hits.get(),
                0,
                "replaced callback should not be invoked after removal",
            );
            assert_eq!(
                second_hits.get(),
                1,
                "active callback should survive stale disposer cleanup",
            );
        });
    }

    #[test]
    fn dispatch_scroll_delta_returns_zero_when_forward_is_blocked() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            mark_scroll_bounds_known(&state);
            state.can_scroll_forward_state.set(false);
            state.can_scroll_backward_state.set(true);

            let consumed = state.dispatch_scroll_delta(-24.0);

            assert_eq!(consumed, 0.0);
            assert_eq!(state.peek_scroll_delta(), 0.0);
        });
    }

    #[test]
    fn equality_does_not_deref_released_inner_state() {
        let mut composition = Composition::new(MemoryApplier::new());
        let key = location_key(file!(), line!(), column!());

        let mut first = None;
        composition
            .render(key, || {
                first = Some(super::remember_lazy_list_state());
            })
            .expect("initial render");
        let first = first.expect("first lazy state");

        // Render without the state to dispose its composition slots.
        composition
            .render(key, || {})
            .expect("dispose first lazy state");
        assert!(
            !first.inner.is_alive(),
            "expected first lazy state to be released after disposal"
        );

        let mut second = None;
        composition
            .render(key, || {
                second = Some(super::remember_lazy_list_state());
            })
            .expect("second render");
        let second = second.expect("second lazy state");

        assert!(
            first != second,
            "released lazy state handle must compare by identity without panicking"
        );
    }

    #[test]
    fn released_lazy_list_state_scroll_position_methods_do_not_panic() {
        let mut composition = Composition::new(MemoryApplier::new());
        let key = location_key(file!(), line!(), column!());

        let mut released = None;
        composition
            .render(key, || {
                released = Some(super::remember_lazy_list_state());
            })
            .expect("initial render");
        let released = released.expect("lazy list state");

        composition
            .render(key, || {})
            .expect("dispose lazy list state");
        assert!(
            !released.inner.is_alive(),
            "expected lazy list state to be released after disposal"
        );

        // Every accessor and mutator must degrade to defaults/no-ops.
        assert_eq!(released.first_visible_item_index(), 0);
        assert_eq!(released.first_visible_item_scroll_offset(), 0.0);
        assert_eq!(released.nearest_range(), 0..0);
        assert_eq!(
            released.update_scroll_position_if_item_moved(10, |_| Some(0)),
            0
        );
        released.update_scroll_position(3, 12.0);
        released.update_scroll_position_with_key(3, 12.0, 42);
        released.update_scroll_bounds();
    }

    #[test]
    fn dispatch_scroll_delta_clears_stale_pending_at_forward_edge() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            mark_scroll_bounds_known(&state);
            enable_bidirectional_scroll(&state);
            state.dispatch_scroll_delta(-300.0);
            assert!((state.peek_scroll_delta() + 300.0).abs() < 0.001);

            // The forward edge is reached: further forward input is rejected
            // and the same-direction backlog is dropped.
            state.can_scroll_forward_state.set(false);

            let blocked_consumed = state.dispatch_scroll_delta(-10.0);
            assert_eq!(blocked_consumed, 0.0);
            assert_eq!(state.peek_scroll_delta(), 0.0);

            let reverse_consumed = state.dispatch_scroll_delta(12.0);
            assert_eq!(reverse_consumed, 12.0);
            assert!((state.peek_scroll_delta() - 12.0).abs() < 0.001);
        });
    }

    #[test]
    fn negative_scroll_delta_prefetches_forward_items() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            // A negative delta records forward movement, so the scheduler
            // queues the items just after the last visible index.
            state.dispatch_scroll_delta(-24.0);
            state.record_scroll_direction(state.peek_scroll_delta());
            state.update_prefetch_queue(10, 15, 100);

            assert_eq!(state.take_prefetch_indices(), vec![16, 17]);
        });
    }
}
1509}