pub struct IndexEmitter<'a> { /* private fields */ }
A helper for building correct `range_extractor` implementations without allocating.
It enforces the extractor contract (illustrated in the sketch below):
- Out-of-bounds indexes are ignored (and debug-asserted).
- Duplicates are ignored.
- Out-of-order indexes are ignored (and debug-asserted).
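For instance, a minimal sketch of the contract in practice: in release builds the offending calls are silently dropped, while debug builds also trip the debug assertions noted above. The driver closure is illustrative; in practice the virtualizer invokes your extractor.

use virtualizer::{IndexEmitter, Range};

let extractor = |range: Range, emit: &mut dyn FnMut(usize)| {
    let mut e = IndexEmitter::new(range, emit);
    e.emit(5);          // accepted (assuming range.count > 5)
    e.emit(5);          // duplicate: ignored
    e.emit(2);          // out of order: ignored (debug-asserted)
    e.emit(usize::MAX); // out of bounds: ignored (debug-asserted)
};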
§Example
use virtualizer::{IndexEmitter, Range};

let extractor = |range: Range, emit: &mut dyn FnMut(usize)| {
    let mut e = IndexEmitter::new(range, emit);
    e.emit_pinned(0);
    e.emit_overscanned();
};

Implementations§
impl<'a> IndexEmitter<'a>

pub fn new(range: Range, emit: &'a mut dyn FnMut(usize)) -> Self

Creates an emitter for `range` that forwards each accepted index to `emit`.
Examples found in repository: examples/pinned_headers.rs (line 15)
 6  fn main() {
 7      // Example: pinned/sticky "headers" at fixed indexes.
 8      let mut opts = VirtualizerOptions::new(1_000, |_| 1);
 9      opts.overscan = 2;
10
11      let pinned: Arc<[usize]> = Arc::from([0usize, 10, 20, 30, 40, 999]);
12      opts.range_extractor = Some(Arc::new({
13          let pinned = Arc::clone(&pinned);
14          move |r: Range, emit: &mut dyn FnMut(usize)| {
15              let mut e = IndexEmitter::new(r, emit);
16              // IMPORTANT: indexes must be emitted in ascending order.
17              //
18              // We want pinned rows both before and after the overscanned range. To keep the output
19              // sorted, emit:
20              // 1) pinned indexes before the overscanned range
21              // 2) the overscanned contiguous range
22              // 3) pinned indexes after the overscanned range
23              let overscanned_start = r.start_index.saturating_sub(r.overscan);
24              let overscanned_end = r.end_index.saturating_add(r.overscan).min(r.count);
25
26              for &idx in pinned.iter() {
27                  if idx < overscanned_start {
28                      e.emit_pinned(idx);
29                  }
30              }
31
32              e.emit_overscanned();
33
34              for &idx in pinned.iter() {
35                  if idx >= overscanned_end {
36                      e.emit_pinned(idx);
37                  }
38              }
39          }
40      }));
41
42      let mut v = Virtualizer::new(opts);
43      v.set_viewport_and_scroll_clamped(10, 500);
44
45      let mut collected = Vec::new();
46      v.for_each_virtual_index(|i| collected.push(i));
47
48      println!("visible_range={:?}", v.visible_range());
49      println!("virtual_range={:?}", v.virtual_range());
50      println!(
51          "indexes_len={} first_20={:?}",
52          collected.len(),
53          &collected[..20.min(collected.len())]
54      );
55
56      // A real UI would typically iterate items:
57      let mut headers = 0usize;
58      v.for_each_virtual_item(|it| {
59          if pinned.binary_search(&it.index).is_ok() {
60              headers += 1;
61          }
62      });
63      println!("pinned_headers_in_output={headers}");
64  }

More examples:
examples/adapter_sim.rs (line 23)
 7  fn main() {
 8      // Simulate a framework adapter that owns the scroll state.
 9      let saved_scroll = Arc::new(AtomicU64::new(120));
10
11      let opts = VirtualizerOptions::new(1000, |_| 1)
12          .with_initial_rect(Some(Rect {
13              main: 10,
14              cross: 80,
15          }))
16          .with_initial_offset_provider({
17              let saved_scroll = Arc::clone(&saved_scroll);
18              move || saved_scroll.load(Ordering::Relaxed)
19          })
20          .with_scroll_margin(5)
21          .with_range_extractor(Some(|r: Range, emit: &mut dyn FnMut(usize)| {
22              // Pin a "sticky header" at index 0, regardless of scroll position.
23              let mut e = virtualizer::IndexEmitter::new(r, emit);
24              e.emit_pinned(0);
25              e.emit_visible();
26          }));
27
28      let mut v = Virtualizer::new(opts);
29
30      // First render: scroll offset comes from the provider.
31      println!("initial scroll_offset={}", v.scroll_offset());
32      println!("initial scroll_rect={:?}", v.scroll_rect());
33
34      // Adapter updates rect + scroll offset on events.
35      v.apply_scroll_frame(
36          Rect {
37              main: 12,
38              cross: 80,
39          },
40          200,
41          0,
42      );
43
44      let mut items = Vec::new();
45      v.for_each_virtual_item_keyed(|it| items.push(it));
46      println!(
47          "is_scrolling={}, visible_range={:?}, items_len={}",
48          v.is_scrolling(),
49          v.visible_range(),
50          items.len()
51      );
52      println!("first_item={:?}", items.first());
53
54      // Demonstrate scroll-to helpers.
55      let target = v.scroll_to_index_offset(500, Align::Start);
56      v.set_scroll_offset_clamped(target);
57      println!("after scroll_to_index: scroll_offset={}", v.scroll_offset());
58
59      // Demonstrate dynamic measurement + scroll adjustment.
60      let applied = v.resize_item(0, 20);
61      println!("resize_item applied_scroll_adjustment={applied}");
62
63      // Simulate reorder: change key mapping. This automatically rebuilds per-index sizes from the
64      // key-based measurement cache. In real apps, you usually keep `get_item_key` stable and call
65      // `sync_item_keys()` when your dataset is reordered while `count` stays the same.
66      v.set_get_item_key(|i| if i == 0 { 1 } else { i as u64 });
67
68      // Debounced scrolling reset without relying on a native scrollend event.
69      v.update_scrolling(200);
70      println!("after update_scrolling: is_scrolling={}", v.is_scrolling());
71
72      // Toggle enabled to disable all queries.
73      v.set_enabled(false);
74      let mut disabled_len = 0usize;
75      v.for_each_virtual_item(|_| disabled_len += 1);
76      println!(
77          "disabled total_size={}, items_len={}",
78          v.total_size(),
79          disabled_len
80      );
81  }

pub fn range(&self) -> Range

Returns the `Range` this emitter was created with.

pub fn emit(&mut self, index: usize)

Emits a single index, subject to the contract above.
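As a sketch of how `range` and `emit` compose for fully custom output, assuming the half-open `start_index..end_index` window and the `overscan`/`count` fields shown in the repository examples, here is a hypothetical extractor that mounts only every other row inside the overscanned window:

use virtualizer::{IndexEmitter, Range};

let extractor = |r: Range, emit: &mut dyn FnMut(usize)| {
    let mut e = IndexEmitter::new(r, emit);
    let r = e.range(); // the emitter hands back the range it was built from
    let start = r.start_index.saturating_sub(r.overscan);
    let end = r.end_index.saturating_add(r.overscan).min(r.count);
    // Indexes ascend, so this satisfies the contract.
    for i in (start..end).step_by(2) {
        e.emit(i);
    }
};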
pub fn emit_pinned(&mut self, index: usize)

Emits a single pinned index (e.g. a sticky header kept mounted regardless of scroll position), subject to the same contract.
Examples found in repository: examples/pinned_headers.rs (line 28), examples/adapter_sim.rs (line 24); the full listings appear under `new` above.
pub fn emit_range(&mut self, start_index: usize, end_index: usize)

Emits a contiguous run of indexes from `start_index` to `end_index`.
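A minimal sketch, assuming `emit_range` uses the same half-open `[start_index, end_index)` convention the repository examples use when computing the overscanned window (an assumption worth checking against the source if the bound matters):

use virtualizer::{IndexEmitter, Range};

let extractor = |r: Range, emit: &mut dyn FnMut(usize)| {
    let mut e = IndexEmitter::new(r, emit);
    // Keep the first three rows mounted (say, a toolbar region), then emit the
    // visible window. Overlap between the two is harmless: duplicates are ignored.
    e.emit_range(0, 3);
    e.emit_visible();
};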
pub fn emit_visible(&mut self)

Emits the visible contiguous range (without overscan).
Examples found in repository: examples/adapter_sim.rs (line 25); the full listing appears under `new` above.
pub fn emit_overscanned(&mut self)

Emits the visible range expanded by `overscan` on both sides, clamped to `count`.
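Based on the window math in examples/pinned_headers.rs, `emit_overscanned` should cover the same span as the hand-rolled version below; the equivalence is an inference from that example, not a documented guarantee:

use virtualizer::{IndexEmitter, Range};

let extractor = |r: Range, emit: &mut dyn FnMut(usize)| {
    let start = r.start_index.saturating_sub(r.overscan);
    let end = r.end_index.saturating_add(r.overscan).min(r.count);
    let mut e = IndexEmitter::new(r, emit);
    e.emit_range(start, end); // same window as e.emit_overscanned()
};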
Examples found in repository: examples/pinned_headers.rs (line 32); the full listing appears under `new` above.

Auto Trait Implementations§
impl<'a> Freeze for IndexEmitter<'a>
impl<'a> !RefUnwindSafe for IndexEmitter<'a>
impl<'a> !Send for IndexEmitter<'a>
impl<'a> !Sync for IndexEmitter<'a>
impl<'a> Unpin for IndexEmitter<'a>
impl<'a> !UnwindSafe for IndexEmitter<'a>
Blanket Implementations§
impl<T> BorrowMut<T> for T
where
    T: ?Sized,

fn borrow_mut(&mut self) -> &mut T

Mutably borrows from an owned value.