//! cranpose_ui/render_state.rs — shared render/layout invalidation state:
//! scoped repass queues, coarse invalidation flags, and the density factor.

1use cranpose_core::{current_runtime_handle, NodeId, SnapshotStateObserver};
2use std::collections::HashSet;
3use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
4use std::sync::Mutex;
5#[cfg(not(any(test, feature = "test-helpers")))]
6use std::sync::OnceLock;
7#[cfg(test)]
8use std::sync::OnceLock;
9
10struct RenderState {
11    layout_repasses: Mutex<LayoutRepassManager>,
12    draw_repasses: Mutex<DrawRepassManager>,
13    render_invalidated: AtomicBool,
14    pointer_invalidated: AtomicBool,
15    focus_invalidated: AtomicBool,
16    layout_invalidated: AtomicBool,
17    density_bits: AtomicU32,
18}
19
/// Key identifying a single draw command on a node for snapshot-read
/// observation (see `observe_draw_reads`).
///
/// `Copy + Eq + Hash` so it can be used as an observer scope key.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub(crate) struct DrawObservationScope {
    // Node that owns the observed draw command.
    node_id: NodeId,
    // Position of the command within that node's draw command list.
    command_index: usize,
}
25
26impl DrawObservationScope {
27    pub(crate) fn new(node_id: NodeId, command_index: usize) -> Self {
28        Self {
29            node_id,
30            command_index,
31        }
32    }
33}
34
std::thread_local! {
    /// Per-thread snapshot observer for draw-scope reads.
    ///
    /// Invalidation callbacks are forwarded to the UI task queue when a
    /// runtime handle is available; otherwise they run inline on the
    /// calling thread. The observer is started eagerly on first access.
    static DRAW_OBSERVER: SnapshotStateObserver = {
        let observer = SnapshotStateObserver::new(|callback| {
            if let Some(runtime) = current_runtime_handle() {
                runtime.enqueue_ui_task(callback);
            } else {
                // No runtime: degrade to synchronous invocation.
                callback();
            }
        });
        observer.start();
        observer
    };
}
48
/// Runs `block` while recording snapshot-state reads under `scope`.
///
/// When any state read inside `block` later changes, the observer's callback
/// schedules a draw-only repass for the scope's node — no layout work.
pub(crate) fn observe_draw_reads<R>(scope: DrawObservationScope, block: impl FnOnce() -> R) -> R {
    DRAW_OBSERVER.with(|observer| {
        observer.observe_reads(
            scope,
            |scope| {
                schedule_draw_repass(scope.node_id);
            },
            block,
        )
    })
}
60
/// Drops every draw observation registered for `node_id`.
///
/// Call when a node is removed so stale scopes cannot keep scheduling draw
/// repasses for it.
pub(crate) fn clear_draw_observations_for_node(node_id: NodeId) {
    DRAW_OBSERVER.with(|observer| {
        observer.clear_if(|scope| {
            // Scopes are stored type-erased; only our own scope type with a
            // matching node id is cleared.
            scope
                .downcast_ref::<DrawObservationScope>()
                .is_some_and(|scope| scope.node_id == node_id)
        });
    });
}
70
71impl RenderState {
72    fn new() -> Self {
73        Self {
74            layout_repasses: Mutex::new(LayoutRepassManager::new()),
75            draw_repasses: Mutex::new(DrawRepassManager::new()),
76            render_invalidated: AtomicBool::new(false),
77            pointer_invalidated: AtomicBool::new(false),
78            focus_invalidated: AtomicBool::new(false),
79            layout_invalidated: AtomicBool::new(false),
80            density_bits: AtomicU32::new(f32::to_bits(1.0)),
81        }
82    }
83}
84
85#[cfg(not(any(test, feature = "test-helpers")))]
86fn with_render_state<R>(f: impl FnOnce(&RenderState) -> R) -> R {
87    static STATE: OnceLock<RenderState> = OnceLock::new();
88    f(STATE.get_or_init(RenderState::new))
89}
90
/// Test accessor: thread-local state so tests running in parallel do not
/// observe each other's invalidations.
#[cfg(any(test, feature = "test-helpers"))]
fn with_render_state<R>(f: impl FnOnce(&RenderState) -> R) -> R {
    std::thread_local! {
        static STATE: RenderState = RenderState::new();
    }
    STATE.with(|state| f(state))
}
98
/// Manages scoped layout invalidations for specific nodes.
///
/// Similar to PointerDispatchManager, this tracks which specific nodes
/// need layout invalidation rather than forcing a global invalidation.
struct LayoutRepassManager {
    // Nodes whose subtrees need remeasuring on the next layout pass.
    dirty_nodes: HashSet<NodeId>,
}
106
107impl LayoutRepassManager {
108    fn new() -> Self {
109        Self {
110            dirty_nodes: HashSet::new(),
111        }
112    }
113
114    fn schedule_repass(&mut self, node_id: NodeId) {
115        self.dirty_nodes.insert(node_id);
116    }
117
118    fn has_pending_repass(&self) -> bool {
119        !self.dirty_nodes.is_empty()
120    }
121
122    fn take_dirty_nodes(&mut self) -> Vec<NodeId> {
123        self.dirty_nodes.drain().collect()
124    }
125}
126
/// Tracks draw-only invalidations so render data can be refreshed without layout.
struct DrawRepassManager {
    // Nodes whose draw data must be regenerated on the next frame.
    dirty_nodes: HashSet<NodeId>,
}
131
132impl DrawRepassManager {
133    fn new() -> Self {
134        Self {
135            dirty_nodes: HashSet::new(),
136        }
137    }
138
139    fn schedule_repass(&mut self, node_id: NodeId) {
140        self.dirty_nodes.insert(node_id);
141    }
142
143    fn has_pending_repass(&self) -> bool {
144        !self.dirty_nodes.is_empty()
145    }
146
147    fn take_dirty_nodes(&mut self) -> Vec<NodeId> {
148        self.dirty_nodes.drain().collect()
149    }
150}
151
152/// Schedules a layout repass for a specific node.
153///
154/// **This is the preferred way to invalidate layout for local changes** (e.g., scroll, single-node mutations).
155///
156/// The app shell will call `take_layout_repass_nodes()` and bubble dirty flags up the tree
157/// via `bubble_layout_dirty`. This gives you **O(subtree) performance** - only the affected
158/// subtree is remeasured, and layout caches for other parts of the app remain valid.
159///
160/// # Implementation Note
161///
162/// This sets the `LAYOUT_INVALIDATED` flag to signal the app shell there's work to do,
163/// but the flag alone does NOT trigger global cache invalidation. The app shell checks
164/// `take_layout_repass_nodes()` first and processes scoped repasses. Global cache invalidation
165/// only happens if the flag is set AND there are no scoped repasses (a rare fallback case).
166///
167/// # For Global Invalidation
168///
169/// For rare global events (window resize, global scale changes), use `request_layout_invalidation()` instead.
170pub fn schedule_layout_repass(node_id: NodeId) {
171    with_render_state(|state| {
172        state
173            .layout_repasses
174            .lock()
175            .expect("layout repass manager poisoned")
176            .schedule_repass(node_id);
177        state.layout_invalidated.store(true, Ordering::Relaxed);
178    });
179    // Set the layout-invalidated flag so the app shell knows to process repasses.
180    // The app shell will check take_layout_repass_nodes() first (scoped path),
181    // and only falls back to global invalidation if the flag is set without any repass nodes.
182    // Also request render invalidation so the frame is actually drawn.
183    // Without this, programmatic scrolls (e.g., scroll_to_item) wouldn't trigger a redraw
184    // until the next user interaction caused a frame request.
185    request_render_invalidation();
186}
187
188/// Schedules a draw-only repass for a specific node.
189///
190/// This ensures draw/pointer data stays in sync when modifier updates do not
191/// require a layout pass (e.g., draw-only modifier changes).
192pub fn schedule_draw_repass(node_id: NodeId) {
193    with_render_state(|state| {
194        state
195            .draw_repasses
196            .lock()
197            .expect("draw repass manager poisoned")
198            .schedule_repass(node_id);
199    });
200    request_render_invalidation();
201}
202
203/// Returns true if any draw repasses are pending.
204pub fn has_pending_draw_repasses() -> bool {
205    with_render_state(|state| {
206        state
207            .draw_repasses
208            .lock()
209            .expect("draw repass manager poisoned")
210            .has_pending_repass()
211    })
212}
213
214/// Takes all pending draw repass node IDs.
215pub fn take_draw_repass_nodes() -> Vec<NodeId> {
216    with_render_state(|state| {
217        state
218            .draw_repasses
219            .lock()
220            .expect("draw repass manager poisoned")
221            .take_dirty_nodes()
222    })
223}
224
225/// Returns true if any layout repasses are pending.
226pub fn has_pending_layout_repasses() -> bool {
227    with_render_state(|state| {
228        state
229            .layout_repasses
230            .lock()
231            .expect("layout repass manager poisoned")
232            .has_pending_repass()
233    })
234}
235
236/// Takes all pending layout repass node IDs.
237///
238/// The caller should iterate over these and call `bubble_layout_dirty` for each.
239pub fn take_layout_repass_nodes() -> Vec<NodeId> {
240    with_render_state(|state| {
241        state
242            .layout_repasses
243            .lock()
244            .expect("layout repass manager poisoned")
245            .take_dirty_nodes()
246    })
247}
248
249/// Returns the current density scale factor (logical px per dp).
250pub fn current_density() -> f32 {
251    with_render_state(|state| f32::from_bits(state.density_bits.load(Ordering::Relaxed)))
252}
253
254/// Updates the current density scale factor.
255///
256/// This triggers a global layout invalidation when the value changes because
257/// density impacts layout, text measurement, and input thresholds.
258pub fn set_density(density: f32) {
259    let normalized = if density.is_finite() && density > 0.0 {
260        density
261    } else {
262        1.0
263    };
264    let new_bits = normalized.to_bits();
265    with_render_state(|state| {
266        let old_bits = state.density_bits.swap(new_bits, Ordering::Relaxed);
267        if old_bits != new_bits {
268            state.layout_invalidated.store(true, Ordering::Relaxed);
269        }
270    });
271}
272
273/// Requests that the renderer rebuild the current scene.
274pub fn request_render_invalidation() {
275    with_render_state(|state| state.render_invalidated.store(true, Ordering::Relaxed));
276}
277
278/// Returns true if a render invalidation was pending and clears the flag.
279pub fn take_render_invalidation() -> bool {
280    with_render_state(|state| state.render_invalidated.swap(false, Ordering::Relaxed))
281}
282
283/// Returns true if a render invalidation is pending without clearing it.
284pub fn peek_render_invalidation() -> bool {
285    with_render_state(|state| state.render_invalidated.load(Ordering::Relaxed))
286}
287
288/// Requests a new pointer-input pass without touching layout or draw dirties.
289pub fn request_pointer_invalidation() {
290    with_render_state(|state| state.pointer_invalidated.store(true, Ordering::Relaxed));
291}
292
293/// Returns true if a pointer invalidation was pending and clears the flag.
294pub fn take_pointer_invalidation() -> bool {
295    with_render_state(|state| state.pointer_invalidated.swap(false, Ordering::Relaxed))
296}
297
298/// Returns true if a pointer invalidation is pending without clearing it.
299pub fn peek_pointer_invalidation() -> bool {
300    with_render_state(|state| state.pointer_invalidated.load(Ordering::Relaxed))
301}
302
303/// Requests a focus recomposition without affecting layout/draw dirties.
304pub fn request_focus_invalidation() {
305    with_render_state(|state| state.focus_invalidated.store(true, Ordering::Relaxed));
306}
307
308/// Returns true if a focus invalidation was pending and clears the flag.
309pub fn take_focus_invalidation() -> bool {
310    with_render_state(|state| state.focus_invalidated.swap(false, Ordering::Relaxed))
311}
312
313/// Returns true if a focus invalidation is pending without clearing it.
314pub fn peek_focus_invalidation() -> bool {
315    with_render_state(|state| state.focus_invalidated.load(Ordering::Relaxed))
316}
317
318/// Requests a **global** layout re-run.
319///
320/// # ⚠️ WARNING: Extremely Expensive - O(entire app size)
321///
322/// This triggers internal cache invalidation that forces **every node** in the app
323/// to re-measure, even if nothing changed. This is a performance footgun!
324///
325/// ## Valid Use Cases (rare!)
326///
327/// Only use this for **true global changes** that affect layout computation everywhere:
328/// - Window/viewport resize
329/// - Global font scale or density changes
330/// - System-wide theme changes that affect layout
331/// - Debug toggles that change layout behavior globally
332///
333/// ## For Local Changes - DO NOT USE THIS
334///
335/// **If you're invalidating layout for scroll, a single widget update, or any local change,
336/// you MUST use the scoped repass mechanism instead:**
337///
338/// ```text
339/// cranpose_ui::schedule_layout_repass(node_id);
340/// ```
341///
342/// Scoped repasses give you O(subtree) performance instead of O(app), and they don't
343/// invalidate caches across the entire app.
344pub fn request_layout_invalidation() {
345    with_render_state(|state| state.layout_invalidated.store(true, Ordering::Relaxed));
346}
347
348/// Returns true if a layout invalidation was pending and clears the flag.
349pub fn take_layout_invalidation() -> bool {
350    with_render_state(|state| state.layout_invalidated.swap(false, Ordering::Relaxed))
351}
352
353/// Returns true if a layout invalidation is pending without clearing it.
354pub fn peek_layout_invalidation() -> bool {
355    with_render_state(|state| state.layout_invalidated.load(Ordering::Relaxed))
356}
357
/// Drains every repass queue, clears every invalidation flag, and restores
/// the default density. Test-only helper for a clean slate between tests.
#[cfg(any(test, feature = "test-helpers"))]
#[doc(hidden)]
pub fn reset_render_state_for_tests() {
    let _ = take_draw_repass_nodes();
    let _ = take_layout_repass_nodes();
    let _ = take_render_invalidation();
    let _ = take_pointer_invalidation();
    let _ = take_focus_invalidation();
    let _ = take_layout_invalidation();
    set_density(1.0);
    // set_density re-raises the layout flag when the previous density was
    // not 1.0, so the flag must be cleared a second time.
    let _ = take_layout_invalidation();
}
370
/// Serializes tests that touch the shared render state.
///
/// A poisoned lock is recovered rather than propagated so one failing test
/// does not cascade into failures in every subsequent test.
#[cfg(test)]
pub(crate) fn render_state_test_guard() -> std::sync::MutexGuard<'static, ()> {
    static TEST_LOCK: OnceLock<Mutex<()>> = OnceLock::new();
    TEST_LOCK
        .get_or_init(|| Mutex::new(()))
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner)
}
379
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{mpsc, Arc};

    // Writes made to the RenderState atomics on a worker thread must be
    // visible to the main thread. The mpsc channel send/recv provides the
    // cross-thread happens-before edge, so Relaxed stores suffice here.
    #[test]
    fn invalidation_flags_are_shared_across_threads() {
        let state = Arc::new(RenderState::new());
        let (tx, rx) = mpsc::channel();
        let worker_state = Arc::clone(&state);

        let handle = std::thread::spawn(move || {
            // Set every flag and a non-default density from the worker…
            worker_state
                .render_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .pointer_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .focus_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .layout_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .density_bits
                .store(f32::to_bits(2.0), Ordering::Relaxed);
            // …then signal the main thread that setup is complete.
            tx.send(()).expect("signal invalidation setup");

            // Return the density as seen from the worker for a final check.
            f32::from_bits(worker_state.density_bits.load(Ordering::Relaxed))
        });

        rx.recv().expect("wait for worker invalidation setup");
        // All worker-side writes must now be observable on this thread.
        assert!(state.render_invalidated.load(Ordering::Relaxed));
        assert!(state.pointer_invalidated.load(Ordering::Relaxed));
        assert!(state.focus_invalidated.load(Ordering::Relaxed));
        assert!(state.layout_invalidated.load(Ordering::Relaxed));
        assert_eq!(
            f32::from_bits(state.density_bits.load(Ordering::Relaxed)),
            2.0
        );
        // swap(false) both reads the prior true and clears each flag.
        assert!(state.render_invalidated.swap(false, Ordering::Relaxed));
        assert!(state.pointer_invalidated.swap(false, Ordering::Relaxed));
        assert!(state.focus_invalidated.swap(false, Ordering::Relaxed));
        assert!(state.layout_invalidated.swap(false, Ordering::Relaxed));

        let density = handle.join().expect("worker invalidation snapshot");
        assert_eq!(density, 2.0);
        // Flags stay cleared after the worker has finished.
        assert!(!state.render_invalidated.load(Ordering::Relaxed));
        assert!(!state.pointer_invalidated.load(Ordering::Relaxed));
        assert!(!state.focus_invalidated.load(Ordering::Relaxed));
        assert!(!state.layout_invalidated.load(Ordering::Relaxed));
    }
}
433}