use std::cell::RefCell;
use std::rc::Rc;
use std::sync::OnceLock;
use cranpose_core::{MutableState, NodeId};
use cranpose_macros::composable;
use super::nearest_range::NearestRangeState;
use super::prefetch::{PrefetchScheduler, PrefetchStrategy};
static LAZY_MEASURE_TELEMETRY_ENABLED: OnceLock<bool> = OnceLock::new();

/// Whether verbose lazy-measure telemetry logging is enabled.
///
/// The answer is computed once per process from the presence of the
/// `CRANPOSE_LAZY_MEASURE_TELEMETRY` environment variable (any value counts)
/// and cached in a `OnceLock` for all subsequent calls.
fn lazy_measure_telemetry_enabled() -> bool {
    let enabled = LAZY_MEASURE_TELEMETRY_ENABLED.get_or_init(|| {
        std::env::var_os("CRANPOSE_LAZY_MEASURE_TELEMETRY").is_some()
    });
    *enabled
}
const MAX_PENDING_SCROLL_DELTA: f32 = 2000.0;
/// Snapshot of lazy-layout item bookkeeping, surfaced via `LazyListState::stats`.
#[derive(Clone, Debug, Default, PartialEq)]
pub struct LazyLayoutStats {
    /// Items currently in active use (reported by `update_stats`).
    pub items_in_use: usize,
    /// Items parked in the reuse pool (reported by `update_stats`).
    pub items_in_pool: usize,
    /// Cumulative compositions recorded via `record_composition`.
    pub total_composed: usize,
    /// Subset of `total_composed` that reused a pooled item.
    pub reuse_count: usize,
}
/// Reactive handle for the first-visible-item scroll position of a lazy list.
///
/// `Copy` works because each field is a `MutableState` handle — presumably a
/// small id/handle into the composition's state table rather than the data
/// itself (TODO confirm in cranpose_core).
#[derive(Clone, Copy)]
pub struct LazyListScrollPosition {
    // Index of the first visible item.
    index: MutableState<usize>,
    // Pixel offset of the first visible item past the viewport start.
    scroll_offset: MutableState<f32>,
    // Shared non-reactive bookkeeping (item key, nearest-range window).
    inner: MutableState<Rc<RefCell<ScrollPositionInner>>>,
}
/// Non-reactive scroll-position bookkeeping shared behind `Rc<RefCell<_>>`.
struct ScrollPositionInner {
    // Key of the first visible item from the last measure pass; used by
    // `update_if_first_item_moved` to keep the same item on screen when the
    // dataset changes. `None` after an explicit programmatic scroll.
    last_known_first_item_key: Option<u64>,
    // Tracks a window of indices around the first visible item.
    nearest_range_state: NearestRangeState,
}
impl LazyListScrollPosition {
    /// True only while every backing `MutableState` slot is still owned by a
    /// live composition; a released handle answers `false`.
    fn is_alive(&self) -> bool {
        self.index.is_alive() && self.scroll_offset.is_alive() && self.inner.is_alive()
    }

    /// Non-reactive read of the first visible index; 0 once released.
    fn current_index(&self) -> usize {
        self.index.try_value().unwrap_or(0)
    }

    /// Non-reactive read of the first visible item's offset; 0.0 once released.
    fn current_scroll_offset(&self) -> f32 {
        self.scroll_offset.try_value().unwrap_or(0.0)
    }

    /// First visible item index. Uses `get`, which presumably records a
    /// reactive dependency (TODO confirm) — non-subscribing callers should use
    /// `current_index`. Returns 0 after release.
    pub fn index(&self) -> usize {
        if !self.index.is_alive() {
            return 0;
        }
        self.index.get()
    }

    /// Scroll offset of the first visible item (reactive read, see `index`);
    /// 0.0 after release.
    pub fn scroll_offset(&self) -> f32 {
        if !self.scroll_offset.is_alive() {
            return 0.0;
        }
        self.scroll_offset.get()
    }

    /// Applies the result of a measure pass: remembers the first visible
    /// item's key and refreshes the nearest-range window, then writes the
    /// index/offset states only when they actually changed (offset compared
    /// with a 0.001 epsilon) to avoid redundant reactive notifications.
    pub(crate) fn update_from_measure_result(
        &self,
        first_visible_index: usize,
        first_visible_scroll_offset: f32,
        first_visible_item_key: Option<u64>,
    ) {
        if !self.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.last_known_first_item_key = first_visible_item_key;
            inner.nearest_range_state.update(first_visible_index);
        });
        let old_index = self.index.get();
        if old_index != first_visible_index {
            self.index.set(first_visible_index);
        }
        let old_offset = self.scroll_offset.get();
        if (old_offset - first_visible_scroll_offset).abs() > 0.001 {
            self.scroll_offset.set(first_visible_scroll_offset);
        }
    }

    /// Programmatic position request: writes the target index/offset (with the
    /// same change guards as `update_from_measure_result`) and clears the
    /// remembered first-item key so a later dataset change cannot remap the
    /// explicitly requested index.
    pub(crate) fn request_position_and_forget_last_known_key(
        &self,
        index: usize,
        scroll_offset: f32,
    ) {
        if !self.is_alive() {
            return;
        }
        if self.index.get() != index {
            self.index.set(index);
        }
        if (self.scroll_offset.get() - scroll_offset).abs() > 0.001 {
            self.scroll_offset.set(scroll_offset);
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.last_known_first_item_key = None;
            inner.nearest_range_state.update(index);
        });
    }

    /// Keeps the same item on screen across a dataset change: if a first-item
    /// key is remembered, its new position is looked up via `find_by_key`;
    /// otherwise (or when the key is gone) the current index is clamped to the
    /// new item count. Writes the state only on an actual move and returns the
    /// resolved index — or 0 when the state has been released.
    pub(crate) fn update_if_first_item_moved<F>(
        &self,
        new_item_count: usize,
        find_by_key: F,
    ) -> usize
    where
        F: Fn(u64) -> Option<usize>,
    {
        if !self.index.is_alive() || !self.inner.is_alive() {
            return 0;
        }
        let current_index = self.index.get_non_reactive();
        let last_key = self
            .inner
            .try_with(|rc| rc.borrow().last_known_first_item_key)
            .flatten();
        let new_index = match last_key {
            // No key remembered: just clamp to the (possibly shrunk) dataset.
            None => current_index.min(new_item_count.saturating_sub(1)),
            Some(key) => find_by_key(key)
                .unwrap_or_else(|| current_index.min(new_item_count.saturating_sub(1))),
        };
        if current_index != new_index {
            self.index.set(new_index);
            self.inner.with(|rc| {
                rc.borrow_mut().nearest_range_state.update(new_index);
            });
        }
        new_index
    }

    /// Index window tracked around the first visible item; empty (`0..0`)
    /// once the state has been released.
    pub fn nearest_range(&self) -> std::ops::Range<usize> {
        self.inner
            .try_with(|rc| rc.borrow().nearest_range_state.range())
            .unwrap_or(0..0)
    }
}
/// Public state handle for a lazy list: scroll position, scroll-bounds flags,
/// usage stats, and shared internal bookkeeping. Cheap to copy — every field
/// is a `MutableState` handle.
#[derive(Clone, Copy)]
pub struct LazyListState {
    // First-visible-item position (index + pixel offset).
    scroll_position: LazyListScrollPosition,
    // Reactive flag: more content exists past the viewport end.
    can_scroll_forward_state: MutableState<bool>,
    // Reactive flag: content exists before the viewport start.
    can_scroll_backward_state: MutableState<bool>,
    // Reactive item-usage stats (in-use / pooled counts).
    stats_state: MutableState<LazyLayoutStats>,
    // Shared non-reactive internals; also serves as the identity for `PartialEq`.
    inner: MutableState<Rc<RefCell<LazyListStateInner>>>,
}
/// Identity comparison: two handles are equal when they refer to the same
/// `inner` state slot. Comparing the handles must not dereference released
/// state (see the `equality_does_not_deref_released_inner_state` test).
impl PartialEq for LazyListState {
    fn eq(&self, other: &Self) -> bool {
        self.inner == other.inner
    }
}
/// Non-reactive internals of a lazy list, shared behind `Rc<RefCell<_>>`.
struct LazyListStateInner {
    // Scroll delta accumulated between measure passes; drained by
    // `consume_scroll_delta` and capped at MAX_PENDING_SCROLL_DELTA.
    scroll_to_be_consumed: f32,
    // Pending programmatic scroll target: (item index, pixel offset).
    pending_scroll_to_index: Option<(usize, f32)>,
    // Result of the most recent measure pass.
    layout_info: LazyListLayoutInfo,
    // (id, callback) pairs invoked by `invalidate`.
    invalidate_callbacks: Vec<(u64, Rc<dyn Fn()>)>,
    // Monotonic id source for callbacks; starts at 1 so 0 is never a real id.
    next_callback_id: u64,
    // Id of the single active layout-invalidation callback, if registered.
    layout_invalidation_callback_id: Option<u64>,
    // Node the layout-invalidation callback was registered for.
    layout_invalidation_node_id: Option<NodeId>,
    // Cumulative composition counter (see `record_composition`).
    total_composed: usize,
    // Cumulative reuse counter (see `record_composition`).
    reuse_count: usize,
    // Measured item sizes keyed by index, bounded by an LRU (cap 100).
    item_size_cache: std::collections::HashMap<usize, f32>,
    // LRU order for `item_size_cache`: front = oldest, back = newest.
    item_size_lru: std::collections::VecDeque<usize>,
    // Running mean of first-time item measurements; used for size estimates.
    average_item_size: f32,
    // Count of first-time measurements backing `average_item_size`.
    total_measured_items: usize,
    // Queue of indices to compose ahead of time.
    prefetch_scheduler: PrefetchScheduler,
    // Tuning for the prefetch scheduler.
    prefetch_strategy: PrefetchStrategy,
    // Sign of the last significant scroll: stored as `-delta.signum()`.
    last_scroll_direction: f32,
}
/// Composable that remembers a [`LazyListState`] starting at item 0 with no
/// scroll offset. Convenience wrapper over
/// [`remember_lazy_list_state_with_position`].
#[composable]
pub fn remember_lazy_list_state() -> LazyListState {
    remember_lazy_list_state_with_position(0, 0.0)
}
/// Composable that remembers a [`LazyListState`] seeded with the given first
/// visible item index and scroll offset.
///
/// Every slot is created with `useState`, so the state is retained across
/// recompositions and released when this composable leaves the composition
/// (after which the handle's accessors fall back to defaults).
#[composable]
pub fn remember_lazy_list_state_with_position(
    initial_first_visible_item_index: usize,
    initial_first_visible_item_scroll_offset: f32,
) -> LazyListState {
    let scroll_position = LazyListScrollPosition {
        index: cranpose_core::useState(|| initial_first_visible_item_index),
        scroll_offset: cranpose_core::useState(|| initial_first_visible_item_scroll_offset),
        inner: cranpose_core::useState(|| {
            Rc::new(RefCell::new(ScrollPositionInner {
                last_known_first_item_key: None,
                nearest_range_state: NearestRangeState::new(initial_first_visible_item_index),
            }))
        }),
    };
    let inner = cranpose_core::useState(|| {
        Rc::new(RefCell::new(LazyListStateInner {
            scroll_to_be_consumed: 0.0,
            pending_scroll_to_index: None,
            layout_info: LazyListLayoutInfo::default(),
            invalidate_callbacks: Vec::new(),
            // Ids start at 1 so 0 can act as a "not registered" sentinel.
            next_callback_id: 1,
            layout_invalidation_callback_id: None,
            layout_invalidation_node_id: None,
            total_composed: 0,
            reuse_count: 0,
            item_size_cache: std::collections::HashMap::new(),
            item_size_lru: std::collections::VecDeque::new(),
            average_item_size: super::DEFAULT_ITEM_SIZE_ESTIMATE,
            total_measured_items: 0,
            prefetch_scheduler: PrefetchScheduler::new(),
            prefetch_strategy: PrefetchStrategy::default(),
            last_scroll_direction: 0.0,
        }))
    });
    let can_scroll_forward_state = cranpose_core::useState(|| false);
    let can_scroll_backward_state = cranpose_core::useState(|| false);
    let stats_state = cranpose_core::useState(LazyLayoutStats::default);
    LazyListState {
        scroll_position,
        can_scroll_forward_state,
        can_scroll_backward_state,
        stats_state,
        inner,
    }
}
impl LazyListState {
    /// Stable identity pointer of the shared inner state (useful for identity
    /// checks and logging); null once the state has been released.
    pub fn inner_ptr(&self) -> *const () {
        self.inner
            .try_with(|rc| Rc::as_ptr(rc) as *const ())
            .unwrap_or(std::ptr::null())
    }

    /// Index of the first visible item (reactive read; 0 after release).
    pub fn first_visible_item_index(&self) -> usize {
        self.scroll_position.index()
    }

    /// Pixel offset of the first visible item (reactive read; 0.0 after release).
    pub fn first_visible_item_scroll_offset(&self) -> f32 {
        self.scroll_position.scroll_offset()
    }

    /// True when anything has moved the list off its origin: a nonzero
    /// position, an unconsumed scroll delta, or a pending scroll-to request.
    /// Reads without subscribing, so it is safe to call outside composition.
    pub fn is_scrolled_non_reactive(&self) -> bool {
        self.scroll_position.current_index() > 0
            || self.scroll_position.current_scroll_offset().abs() > 0.001
            || self
                .inner
                .try_with(|rc| {
                    let inner = rc.borrow();
                    inner.scroll_to_be_consumed.abs() > 0.001
                        || inner
                            .pending_scroll_to_index
                            .is_some_and(|(index, offset)| index > 0 || offset.abs() > 0.001)
                })
                .unwrap_or(false)
    }

    /// Clone of the last measure result; default (empty) after release.
    pub fn layout_info(&self) -> LazyListLayoutInfo {
        self.inner
            .try_with(|rc| rc.borrow().layout_info.clone())
            .unwrap_or_default()
    }

    /// Current stats: the in-use/pool counts come from the reactive
    /// `stats_state`, while the cumulative counters come straight from the
    /// non-reactive inner state.
    pub fn stats(&self) -> LazyLayoutStats {
        if !self.stats_state.is_alive() || !self.inner.is_alive() {
            return LazyLayoutStats::default();
        }
        let reactive = self.stats_state.get();
        let (total_composed, reuse_count) = self.inner.with(|rc| {
            let inner = rc.borrow();
            (inner.total_composed, inner.reuse_count)
        });
        LazyLayoutStats {
            items_in_use: reactive.items_in_use,
            items_in_pool: reactive.items_in_pool,
            total_composed,
            reuse_count,
        }
    }

    /// Publishes new usage counts, but only writes the reactive state when
    /// `items_in_use` grows, or shrinks by more than one — presumably a
    /// hysteresis to damp recomposition churn.
    /// NOTE(review): a change in `items_in_pool` alone never triggers a write,
    /// so the published pool count can go stale — confirm this is intended.
    pub fn update_stats(&self, items_in_use: usize, items_in_pool: usize) {
        if !self.stats_state.is_alive() || !self.inner.is_alive() {
            return;
        }
        let current = self.stats_state.get_non_reactive();
        let should_update_reactive = if items_in_use > current.items_in_use {
            true
        } else if items_in_use < current.items_in_use {
            current.items_in_use - items_in_use > 1
        } else {
            false
        };
        if should_update_reactive {
            self.stats_state.set(LazyLayoutStats {
                items_in_use,
                items_in_pool,
                ..current
            });
        }
    }

    /// Bumps the cumulative composition counter, and the reuse counter when
    /// the item came from the reuse pool.
    pub fn record_composition(&self, was_reused: bool) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.total_composed += 1;
            if was_reused {
                inner.reuse_count += 1;
            }
        });
    }

    /// Records the direction of a significant scroll as `-delta.signum()`
    /// (a negative drag delta is stored as +1); sub-epsilon deltas are ignored.
    pub fn record_scroll_direction(&self, delta: f32) {
        if delta.abs() > 0.001 {
            if !self.inner.is_alive() {
                return;
            }
            self.inner.with(|rc| {
                rc.borrow_mut().last_scroll_direction = -delta.signum();
            });
        }
    }

    /// Feeds the visible window, total count, and last scroll direction into
    /// the prefetch scheduler so it can queue ahead-of-time compositions.
    pub fn update_prefetch_queue(
        &self,
        first_visible_index: usize,
        last_visible_index: usize,
        total_items: usize,
    ) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            let direction = inner.last_scroll_direction;
            // Clone the strategy so the scheduler can be borrowed mutably
            // while the strategy is passed by reference.
            let strategy = inner.prefetch_strategy.clone();
            inner.prefetch_scheduler.update(
                first_visible_index,
                last_visible_index,
                total_items,
                direction,
                &strategy,
            );
        });
    }

    /// Drains and returns every index the scheduler currently wants
    /// prefetched; empty after release.
    pub fn take_prefetch_indices(&self) -> Vec<usize> {
        self.inner
            .try_with(|rc| {
                let mut inner = rc.borrow_mut();
                let mut indices = Vec::new();
                while let Some(idx) = inner.prefetch_scheduler.next_prefetch() {
                    indices.push(idx);
                }
                indices
            })
            .unwrap_or_default()
    }

    /// Programmatic scroll: records the pending (index, offset) target for the
    /// next measure pass, updates the scroll position (forgetting the
    /// remembered item key), and invalidates so a re-measure is scheduled.
    pub fn scroll_to_item(&self, index: usize, scroll_offset: f32) {
        if !self.inner.is_alive() {
            return;
        }
        if lazy_measure_telemetry_enabled() {
            log::warn!(
                "[lazy-measure-telemetry] scroll_to_item request index={} offset={:.2}",
                index,
                scroll_offset
            );
        }
        self.inner.with(|rc| {
            rc.borrow_mut().pending_scroll_to_index = Some((index, scroll_offset));
        });
        self.scroll_position
            .request_position_and_forget_last_known_key(index, scroll_offset);
        self.invalidate();
    }

    /// Accepts a scroll delta from input handling and returns how much was
    /// consumed (0.0 when the motion is blocked at an edge).
    ///
    /// Behavior, in order:
    /// - With known bounds, a delta pushing past a closed edge is rejected;
    ///   a same-direction stale backlog is cleared first (and an invalidate is
    ///   issued only if that actually changed the pending value).
    /// - A direction reversal discards the stale backlog and restarts the
    ///   accumulator from the new delta; otherwise the delta accumulates.
    /// - The backlog is clamped to ±MAX_PENDING_SCROLL_DELTA.
    /// - Invalidation is skipped when the (clamped) backlog did not change.
    /// NOTE(review): the full input delta is reported as consumed even when
    /// the clamp discarded part of it — confirm callers tolerate this.
    pub fn dispatch_scroll_delta(&self, delta: f32) -> f32 {
        if !self.inner.is_alive() {
            return 0.0;
        }
        // Bounds are only trustworthy after at least one measure pass has
        // populated layout_info.
        let has_scroll_bounds = self
            .inner
            .with(|rc| rc.borrow().layout_info.total_items_count > 0);
        let pushing_forward = delta < -0.001;
        let pushing_backward = delta > 0.001;
        let blocked_by_bounds = has_scroll_bounds
            && ((pushing_forward && !self.can_scroll_forward())
                || (pushing_backward && !self.can_scroll_backward()));
        if blocked_by_bounds {
            let should_invalidate = self.inner.with(|rc| {
                let mut inner = rc.borrow_mut();
                let pending_before = inner.scroll_to_be_consumed;
                // Drop a backlog that points into the blocked edge too.
                if pending_before.abs() > 0.001 && pending_before.signum() == delta.signum() {
                    inner.scroll_to_be_consumed = 0.0;
                }
                if lazy_measure_telemetry_enabled() {
                    log::warn!(
                        "[lazy-measure-telemetry] dispatch_scroll_delta blocked_by_bounds delta={:.2} pending_before={:.2} pending_after={:.2}",
                        delta,
                        pending_before,
                        inner.scroll_to_be_consumed
                    );
                }
                (inner.scroll_to_be_consumed - pending_before).abs() > 0.001
            });
            if should_invalidate {
                self.invalidate();
            }
            return 0.0;
        }
        let should_invalidate = self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            let pending_before = inner.scroll_to_be_consumed;
            let pending = inner.scroll_to_be_consumed;
            let reverse_input = pending.abs() > 0.001
                && delta.abs() > 0.001
                && pending.signum() != delta.signum();
            if reverse_input {
                if lazy_measure_telemetry_enabled() {
                    log::warn!(
                        "[lazy-measure-telemetry] dispatch_scroll_delta direction_change pending={:.2} new_delta={:.2}",
                        pending,
                        delta
                    );
                }
                // Direction changed: the stale backlog is no longer what the
                // user wants — restart from the new delta.
                inner.scroll_to_be_consumed = delta;
            } else {
                inner.scroll_to_be_consumed += delta;
            }
            inner.scroll_to_be_consumed = inner
                .scroll_to_be_consumed
                .clamp(-MAX_PENDING_SCROLL_DELTA, MAX_PENDING_SCROLL_DELTA);
            if lazy_measure_telemetry_enabled() {
                log::warn!(
                    "[lazy-measure-telemetry] dispatch_scroll_delta delta={:.2} pending={:.2}",
                    delta,
                    inner.scroll_to_be_consumed
                );
            }
            (inner.scroll_to_be_consumed - pending_before).abs() > 0.001
        });
        if should_invalidate {
            self.invalidate();
        }
        delta
    }

    /// Takes the accumulated scroll backlog, resetting it to 0; called by the
    /// measure pass. 0.0 after release.
    pub(crate) fn consume_scroll_delta(&self) -> f32 {
        self.inner
            .try_with(|rc| {
                let mut inner = rc.borrow_mut();
                let delta = inner.scroll_to_be_consumed;
                inner.scroll_to_be_consumed = 0.0;
                delta
            })
            .unwrap_or(0.0)
    }

    /// Reads the accumulated scroll backlog without consuming it.
    pub fn peek_scroll_delta(&self) -> f32 {
        self.inner
            .try_with(|rc| rc.borrow().scroll_to_be_consumed)
            .unwrap_or(0.0)
    }

    /// Takes the pending programmatic scroll target, if any.
    pub(crate) fn consume_scroll_to_index(&self) -> Option<(usize, f32)> {
        self.inner
            .try_with(|rc| rc.borrow_mut().pending_scroll_to_index.take())
            .flatten()
    }

    /// Records a measured item size. Re-measuring an already-cached index just
    /// updates the value and refreshes its LRU position; a first-time
    /// measurement may first evict the oldest entry (cap 100) and then also
    /// folds the size into the running average.
    pub fn cache_item_size(&self, index: usize, size: f32) {
        use std::collections::hash_map::Entry;
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            const MAX_CACHE_SIZE: usize = 100;
            if let Entry::Occupied(mut entry) = inner.item_size_cache.entry(index) {
                entry.insert(size);
                // Move this index to the most-recently-used end.
                if let Some(pos) = inner.item_size_lru.iter().position(|&k| k == index) {
                    inner.item_size_lru.remove(pos);
                }
                inner.item_size_lru.push_back(index);
                return;
            }
            // Evict until one real removal succeeds (the loop also skips any
            // LRU entries that somehow no longer exist in the map).
            while inner.item_size_cache.len() >= MAX_CACHE_SIZE {
                if let Some(oldest) = inner.item_size_lru.pop_front() {
                    if inner.item_size_cache.remove(&oldest).is_some() {
                        break;
                    }
                } else {
                    break;
                }
            }
            inner.item_size_cache.insert(index, size);
            inner.item_size_lru.push_back(index);
            inner.total_measured_items += 1;
            // Incremental running mean over first-time measurements only.
            let n = inner.total_measured_items as f32;
            inner.average_item_size = inner.average_item_size * ((n - 1.0) / n) + size / n;
        });
    }

    /// Cached measured size for `index`, if still present.
    pub fn get_cached_size(&self, index: usize) -> Option<f32> {
        self.inner
            .try_with(|rc| rc.borrow().item_size_cache.get(&index).copied())
            .flatten()
    }

    /// Running mean of measured item sizes; falls back to the static estimate
    /// before any measurement or after release.
    pub fn average_item_size(&self) -> f32 {
        self.inner
            .try_with(|rc| rc.borrow().average_item_size)
            .unwrap_or(super::DEFAULT_ITEM_SIZE_ESTIMATE)
    }

    /// Index window around the first visible item; `0..0` after release.
    pub fn nearest_range(&self) -> std::ops::Range<usize> {
        self.scroll_position.nearest_range()
    }

    /// Measure-pass position update without a first-item key (the key is
    /// cleared, so dataset changes will fall back to index clamping).
    pub(crate) fn update_scroll_position(
        &self,
        first_visible_item_index: usize,
        first_visible_item_scroll_offset: f32,
    ) {
        self.scroll_position.update_from_measure_result(
            first_visible_item_index,
            first_visible_item_scroll_offset,
            None,
        );
    }

    /// Measure-pass position update that also remembers the first visible
    /// item's key for position tracking across dataset changes.
    pub(crate) fn update_scroll_position_with_key(
        &self,
        first_visible_item_index: usize,
        first_visible_item_scroll_offset: f32,
        first_visible_item_key: u64,
    ) {
        self.scroll_position.update_from_measure_result(
            first_visible_item_index,
            first_visible_item_scroll_offset,
            Some(first_visible_item_key),
        );
    }

    /// Re-resolves the first visible index after a dataset change; see
    /// `LazyListScrollPosition::update_if_first_item_moved`.
    pub fn update_scroll_position_if_item_moved<F>(
        &self,
        new_item_count: usize,
        get_index_by_key: F,
    ) -> usize
    where
        F: Fn(u64) -> Option<usize>,
    {
        self.scroll_position
            .update_if_first_item_moved(new_item_count, get_index_by_key)
    }

    /// Stores the result of the latest measure pass.
    pub(crate) fn update_layout_info(&self, info: LazyListLayoutInfo) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| rc.borrow_mut().layout_info = info);
    }

    /// Reactive read of the forward-scroll flag; false after release.
    pub fn can_scroll_forward(&self) -> bool {
        if !self.can_scroll_forward_state.is_alive() {
            return false;
        }
        self.can_scroll_forward_state.get()
    }

    /// Reactive read of the backward-scroll flag; false after release.
    pub fn can_scroll_backward(&self) -> bool {
        if !self.can_scroll_backward_state.is_alive() {
            return false;
        }
        self.can_scroll_backward_state.get()
    }

    /// Recomputes both scroll flags from the current layout and position.
    /// Forward is possible when the last visible item is not the final item,
    /// or it overhangs the padded viewport end; backward when the position is
    /// off origin. Writes each flag only on change to avoid notification churn.
    pub(crate) fn update_scroll_bounds(&self) {
        if !self.inner.is_alive()
            || !self.can_scroll_forward_state.is_alive()
            || !self.can_scroll_backward_state.is_alive()
        {
            return;
        }
        let can_forward = self.inner.with(|rc| {
            let inner = rc.borrow();
            let info = &inner.layout_info;
            let viewport_end = info.viewport_size - info.after_content_padding;
            if let Some(last_visible) = info.visible_items_info.last() {
                last_visible.index < info.total_items_count.saturating_sub(1)
                    || (last_visible.offset + last_visible.size) > viewport_end
            } else {
                false
            }
        });
        let can_backward = self.scroll_position.current_index() > 0
            || self.scroll_position.current_scroll_offset() > 0.0;
        if self.can_scroll_forward_state.get_non_reactive() != can_forward {
            self.can_scroll_forward_state.set(can_forward);
        }
        if self.can_scroll_backward_state.get_non_reactive() != can_backward {
            self.can_scroll_backward_state.set(can_backward);
        }
    }

    /// Registers a callback run on `invalidate` and returns its id.
    /// Returns 0 (never a real id — ids start at 1) when released.
    pub fn add_invalidate_callback(&self, callback: Rc<dyn Fn()>) -> u64 {
        if !self.inner.is_alive() {
            return 0;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            let id = inner.next_callback_id;
            inner.next_callback_id += 1;
            inner.invalidate_callbacks.push((id, callback));
            id
        })
    }

    /// Registers (or replaces) THE layout-invalidation callback for `node_id`.
    /// Any previously registered layout callback is removed first, so there is
    /// at most one active at a time. Returns the new id, or `None` when the
    /// state has been released.
    pub fn try_register_layout_callback(
        &self,
        node_id: NodeId,
        callback: Rc<dyn Fn()>,
    ) -> Option<u64> {
        if !self.inner.is_alive() {
            return None;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            if let Some(existing_id) = inner.layout_invalidation_callback_id {
                inner
                    .invalidate_callbacks
                    .retain(|(cb_id, _)| *cb_id != existing_id);
            }
            let id = inner.next_callback_id;
            inner.next_callback_id += 1;
            inner.invalidate_callbacks.push((id, callback));
            inner.layout_invalidation_callback_id = Some(id);
            inner.layout_invalidation_node_id = Some(node_id);
            Some(id)
        })
    }

    /// Unregisters a callback by id; also clears the layout-callback slot when
    /// that id was the active layout callback. A stale id (already replaced)
    /// removes nothing of the current registration.
    pub fn remove_invalidate_callback(&self, id: u64) {
        if !self.inner.is_alive() {
            return;
        }
        self.inner.with(|rc| {
            let mut inner = rc.borrow_mut();
            inner.invalidate_callbacks.retain(|(cb_id, _)| *cb_id != id);
            if inner.layout_invalidation_callback_id == Some(id) {
                inner.layout_invalidation_callback_id = None;
                inner.layout_invalidation_node_id = None;
            }
        });
    }

    /// Runs every registered invalidate callback. The list is snapshotted
    /// (cloned) before invoking so a callback that re-enters this state cannot
    /// hit a RefCell double-borrow.
    fn invalidate(&self) {
        if !self.inner.is_alive() {
            return;
        }
        let callbacks: Vec<_> = self.inner.with(|rc| {
            rc.borrow()
                .invalidate_callbacks
                .iter()
                .map(|(_, cb)| Rc::clone(cb))
                .collect()
        });
        for callback in callbacks {
            callback();
        }
    }
}
/// Result of a lazy-list measure pass.
#[derive(Clone, Default, Debug)]
pub struct LazyListLayoutInfo {
    /// Per-item info for the items currently visible, in layout order.
    pub visible_items_info: Vec<LazyListItemInfo>,
    /// Total number of items in the list (0 means "bounds unknown" for
    /// `dispatch_scroll_delta`'s edge blocking).
    pub total_items_count: usize,
    /// Viewport size before any adjustment — presumably as proposed by the
    /// parent constraints (TODO confirm against the measure pass).
    pub raw_viewport_size: f32,
    /// Whether the viewport constraint was unbounded.
    pub is_infinite_viewport: bool,
    /// Effective viewport size used for layout; `update_scroll_bounds`
    /// subtracts `after_content_padding` from this to find the content end.
    pub viewport_size: f32,
    /// Offset where the visible window starts, in item coordinates.
    pub viewport_start_offset: f32,
    /// Offset where the visible window ends, in item coordinates.
    pub viewport_end_offset: f32,
    /// Padding before the first item's content.
    pub before_content_padding: f32,
    /// Padding after the last item's content.
    pub after_content_padding: f32,
}
/// Placement of a single visible item within the lazy list.
#[derive(Clone, Debug)]
pub struct LazyListItemInfo {
    /// Item position in the dataset.
    pub index: usize,
    /// Stable item key (used for position tracking across data changes).
    pub key: u64,
    /// Main-axis offset of the item within the viewport.
    pub offset: f32,
    /// Main-axis size of the item.
    pub size: f32,
}
#[cfg(test)]
pub mod test_helpers {
    //! Constructors for tests that need a `LazyListState` outside a real
    //! composition. They mirror `remember_lazy_list_state_with_position` but
    //! use `mutableStateOf` directly instead of `useState`.
    use super::*;
    use cranpose_core::{DefaultScheduler, Runtime};
    use std::sync::Arc;

    /// Runs `f` with a live cranpose runtime, which the state slots require.
    pub fn with_test_runtime<T>(f: impl FnOnce() -> T) -> T {
        let _runtime = Runtime::new(Arc::new(DefaultScheduler));
        f()
    }

    /// State at the origin (item 0, offset 0.0).
    pub fn new_lazy_list_state() -> LazyListState {
        new_lazy_list_state_with_position(0, 0.0)
    }

    /// State seeded at the given position. Keep the field initialization in
    /// sync with `remember_lazy_list_state_with_position`.
    pub fn new_lazy_list_state_with_position(
        initial_first_visible_item_index: usize,
        initial_first_visible_item_scroll_offset: f32,
    ) -> LazyListState {
        let scroll_position = LazyListScrollPosition {
            index: cranpose_core::mutableStateOf(initial_first_visible_item_index),
            scroll_offset: cranpose_core::mutableStateOf(initial_first_visible_item_scroll_offset),
            inner: cranpose_core::mutableStateOf(Rc::new(RefCell::new(ScrollPositionInner {
                last_known_first_item_key: None,
                nearest_range_state: NearestRangeState::new(initial_first_visible_item_index),
            }))),
        };
        let inner = cranpose_core::mutableStateOf(Rc::new(RefCell::new(LazyListStateInner {
            scroll_to_be_consumed: 0.0,
            pending_scroll_to_index: None,
            layout_info: LazyListLayoutInfo::default(),
            invalidate_callbacks: Vec::new(),
            next_callback_id: 1,
            layout_invalidation_callback_id: None,
            layout_invalidation_node_id: None,
            total_composed: 0,
            reuse_count: 0,
            item_size_cache: std::collections::HashMap::new(),
            item_size_lru: std::collections::VecDeque::new(),
            average_item_size: super::super::DEFAULT_ITEM_SIZE_ESTIMATE,
            total_measured_items: 0,
            prefetch_scheduler: PrefetchScheduler::new(),
            prefetch_strategy: PrefetchStrategy::default(),
            last_scroll_direction: 0.0,
        })));
        let can_scroll_forward_state = cranpose_core::mutableStateOf(false);
        let can_scroll_backward_state = cranpose_core::mutableStateOf(false);
        let stats_state = cranpose_core::mutableStateOf(LazyLayoutStats::default());
        LazyListState {
            scroll_position,
            can_scroll_forward_state,
            can_scroll_backward_state,
            stats_state,
            inner,
        }
    }
}
#[cfg(test)]
mod tests {
    use super::test_helpers::{new_lazy_list_state, with_test_runtime};
    use super::{LazyListLayoutInfo, LazyListState};
    use cranpose_core::{location_key, Composition, MemoryApplier};
    use std::cell::Cell;
    use std::rc::Rc;

    // Opens both scroll edges so deltas are never blocked by bounds.
    fn enable_bidirectional_scroll(state: &LazyListState) {
        state.can_scroll_forward_state.set(true);
        state.can_scroll_backward_state.set(true);
    }

    // A nonzero total_items_count makes dispatch_scroll_delta trust the
    // scroll-bound flags.
    fn mark_scroll_bounds_known(state: &LazyListState) {
        state.update_layout_info(LazyListLayoutInfo {
            total_items_count: 10,
            ..Default::default()
        });
    }

    // Same-direction deltas add up in the pending backlog, and consuming
    // drains it exactly once.
    #[test]
    fn dispatch_scroll_delta_accumulates_same_direction() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);
            state.dispatch_scroll_delta(-12.0);
            state.dispatch_scroll_delta(-8.0);
            assert!((state.peek_scroll_delta() + 20.0).abs() < 0.001);
            assert!((state.consume_scroll_delta() + 20.0).abs() < 0.001);
            assert_eq!(state.consume_scroll_delta(), 0.0);
        });
    }

    // Reversing direction discards the stale backlog and restarts from the
    // new delta.
    #[test]
    fn dispatch_scroll_delta_drops_stale_backlog_on_direction_change() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);
            state.dispatch_scroll_delta(-120.0);
            state.dispatch_scroll_delta(-30.0);
            assert!((state.peek_scroll_delta() + 150.0).abs() < 0.001);
            state.dispatch_scroll_delta(18.0);
            assert!((state.peek_scroll_delta() - 18.0).abs() < 0.001);
            assert!((state.consume_scroll_delta() - 18.0).abs() < 0.001);
            assert_eq!(state.consume_scroll_delta(), 0.0);
        });
    }

    // The backlog saturates at ±MAX_PENDING_SCROLL_DELTA in both directions.
    #[test]
    fn dispatch_scroll_delta_clamps_pending_backlog() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);
            state.dispatch_scroll_delta(-1_500.0);
            state.dispatch_scroll_delta(-1_500.0);
            assert!((state.peek_scroll_delta() + super::MAX_PENDING_SCROLL_DELTA).abs() < 0.001);
            state.dispatch_scroll_delta(3_000.0);
            assert!((state.peek_scroll_delta() - super::MAX_PENDING_SCROLL_DELTA).abs() < 0.001);
        });
    }

    // No invalidate is issued when the clamped backlog value is unchanged;
    // a direction change (which does change it) invalidates again.
    #[test]
    fn dispatch_scroll_delta_skips_invalidate_when_clamped_value_is_unchanged() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            enable_bidirectional_scroll(&state);
            let invalidations = Rc::new(Cell::new(0u32));
            let invalidations_clone = Rc::clone(&invalidations);
            state.add_invalidate_callback(Rc::new(move || {
                invalidations_clone.set(invalidations_clone.get() + 1);
            }));
            state.dispatch_scroll_delta(-3_000.0);
            assert_eq!(invalidations.get(), 1);
            assert!((state.peek_scroll_delta() + super::MAX_PENDING_SCROLL_DELTA).abs() < 0.001);
            state.dispatch_scroll_delta(-100.0);
            assert_eq!(invalidations.get(), 1);
            state.dispatch_scroll_delta(100.0);
            assert_eq!(invalidations.get(), 2);
        });
    }

    // Registering a layout callback twice replaces the first registration;
    // removing the stale id does not block a fresh registration.
    #[test]
    fn layout_callback_can_be_registered_again_after_removal() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            let first_node: cranpose_core::NodeId = 1;
            let second_node: cranpose_core::NodeId = 2;
            let first_id = state
                .try_register_layout_callback(first_node, Rc::new(|| {}))
                .expect("first layout callback should register");
            let duplicate_id = state
                .try_register_layout_callback(first_node, Rc::new(|| {}))
                .expect("duplicate register should replace with a fresh callback id");
            assert_eq!(
                state
                    .inner
                    .with(|rc| rc.borrow().layout_invalidation_callback_id),
                Some(duplicate_id),
                "duplicate registration should become the active callback",
            );
            assert_ne!(
                first_id, duplicate_id,
                "duplicate registration should replace the old callback id",
            );
            state.remove_invalidate_callback(first_id);
            let second_id = state
                .try_register_layout_callback(second_node, Rc::new(|| {}))
                .expect("layout callback should register again after removal");
            assert_ne!(first_id, second_id);
        });
    }

    // Re-registering for a different node yields a fresh callback id.
    #[test]
    fn layout_callback_rebinds_when_node_id_changes() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            let first_node: cranpose_core::NodeId = 11;
            let second_node: cranpose_core::NodeId = 22;
            let first_id = state
                .try_register_layout_callback(first_node, Rc::new(|| {}))
                .expect("first layout callback should register");
            let second_id = state
                .try_register_layout_callback(second_node, Rc::new(|| {}))
                .expect("layout callback should rebind to a new node");
            assert_ne!(first_id, second_id);
        });
    }

    // Removing by a stale (replaced) id must not tear down the currently
    // active same-node callback.
    #[test]
    fn stale_layout_callback_disposer_cannot_remove_replaced_same_node_callback() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            let node_id: cranpose_core::NodeId = 7;
            let first_hits = Rc::new(Cell::new(0u32));
            let second_hits = Rc::new(Cell::new(0u32));
            let first_id = state
                .try_register_layout_callback(
                    node_id,
                    Rc::new({
                        let first_hits = Rc::clone(&first_hits);
                        move || first_hits.set(first_hits.get() + 1)
                    }),
                )
                .expect("first layout callback should register");
            let second_id = state
                .try_register_layout_callback(
                    node_id,
                    Rc::new({
                        let second_hits = Rc::clone(&second_hits);
                        move || second_hits.set(second_hits.get() + 1)
                    }),
                )
                .expect("same-node registration should replace the active callback");
            assert_ne!(first_id, second_id);
            state.remove_invalidate_callback(first_id);
            state.dispatch_scroll_delta(-12.0);
            assert_eq!(
                first_hits.get(),
                0,
                "replaced callback should not be invoked after removal",
            );
            assert_eq!(
                second_hits.get(),
                1,
                "active callback should survive stale disposer cleanup",
            );
        });
    }

    // With known bounds and a closed forward edge, a forward delta is fully
    // rejected and nothing accumulates.
    #[test]
    fn dispatch_scroll_delta_returns_zero_when_forward_is_blocked() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            mark_scroll_bounds_known(&state);
            state.can_scroll_forward_state.set(false);
            state.can_scroll_backward_state.set(true);
            let consumed = state.dispatch_scroll_delta(-24.0);
            assert_eq!(consumed, 0.0);
            assert_eq!(state.peek_scroll_delta(), 0.0);
        });
    }

    // PartialEq must compare by identity and never dereference the released
    // inner state of a disposed handle.
    #[test]
    fn equality_does_not_deref_released_inner_state() {
        let mut composition = Composition::new(MemoryApplier::new());
        let key = location_key(file!(), line!(), column!());
        let mut first = None;
        composition
            .render(key, || {
                first = Some(super::remember_lazy_list_state());
            })
            .expect("initial render");
        let first = first.expect("first lazy state");
        composition
            .render(key, || {})
            .expect("dispose first lazy state");
        assert!(
            !first.inner.is_alive(),
            "expected first lazy state to be released after disposal"
        );
        let mut second = None;
        composition
            .render(key, || {
                second = Some(super::remember_lazy_list_state());
            })
            .expect("second render");
        let second = second.expect("second lazy state");
        assert!(
            first != second,
            "released lazy state handle must compare by identity without panicking"
        );
    }

    // Every position accessor/mutator on a released handle must fall back to
    // defaults or no-op instead of panicking.
    #[test]
    fn released_lazy_list_state_scroll_position_methods_do_not_panic() {
        let mut composition = Composition::new(MemoryApplier::new());
        let key = location_key(file!(), line!(), column!());
        let mut released = None;
        composition
            .render(key, || {
                released = Some(super::remember_lazy_list_state());
            })
            .expect("initial render");
        let released = released.expect("lazy list state");
        composition
            .render(key, || {})
            .expect("dispose lazy list state");
        assert!(
            !released.inner.is_alive(),
            "expected lazy list state to be released after disposal"
        );
        assert_eq!(released.first_visible_item_index(), 0);
        assert_eq!(released.first_visible_item_scroll_offset(), 0.0);
        assert_eq!(released.nearest_range(), 0..0);
        assert_eq!(
            released.update_scroll_position_if_item_moved(10, |_| Some(0)),
            0
        );
        released.update_scroll_position(3, 12.0);
        released.update_scroll_position_with_key(3, 12.0, 42);
        released.update_scroll_bounds();
    }

    // Hitting the closed forward edge clears the same-direction backlog, and
    // a subsequent reverse delta starts a fresh backlog.
    #[test]
    fn dispatch_scroll_delta_clears_stale_pending_at_forward_edge() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            mark_scroll_bounds_known(&state);
            enable_bidirectional_scroll(&state);
            state.dispatch_scroll_delta(-300.0);
            assert!((state.peek_scroll_delta() + 300.0).abs() < 0.001);
            state.can_scroll_forward_state.set(false);
            let blocked_consumed = state.dispatch_scroll_delta(-10.0);
            assert_eq!(blocked_consumed, 0.0);
            assert_eq!(state.peek_scroll_delta(), 0.0);
            let reverse_consumed = state.dispatch_scroll_delta(12.0);
            assert_eq!(reverse_consumed, 12.0);
            assert!((state.peek_scroll_delta() - 12.0).abs() < 0.001);
        });
    }

    // A negative (forward) drag records direction +1, so prefetch queues the
    // items just past the visible window.
    #[test]
    fn negative_scroll_delta_prefetches_forward_items() {
        with_test_runtime(|| {
            let state = new_lazy_list_state();
            state.dispatch_scroll_delta(-24.0);
            state.record_scroll_direction(state.peek_scroll_delta());
            state.update_prefetch_queue(10, 15, 100);
            assert_eq!(state.take_prefetch_indices(), vec![16, 17]);
        });
    }
}