//! render_state.rs — shared invalidation and repass bookkeeping for cranpose_ui:
//! scoped layout/draw repasses, render/pointer/focus/layout flags, and density.

1use cranpose_core::NodeId;
2use std::collections::HashSet;
3use std::sync::atomic::{AtomicBool, AtomicU32, Ordering};
4use std::sync::Mutex;
5#[cfg(not(any(test, feature = "test-helpers")))]
6use std::sync::OnceLock;
7#[cfg(test)]
8use std::sync::OnceLock;
9
/// Shared invalidation bookkeeping consulted by the app shell each frame.
///
/// Obtained through `with_render_state`: one process-wide instance in
/// production builds, one per thread under test/test-helpers builds.
struct RenderState {
    // Nodes whose layout must be re-run (the scoped, O(subtree) path).
    layout_repasses: Mutex<LayoutRepassManager>,
    // Nodes whose draw data must be refreshed without a layout pass.
    draw_repasses: Mutex<DrawRepassManager>,
    // The renderer should rebuild the current scene.
    render_invalidated: AtomicBool,
    // A pointer-input pass should run (no layout/draw work implied).
    pointer_invalidated: AtomicBool,
    // Focus state should be recomputed.
    focus_invalidated: AtomicBool,
    // Layout work is pending; scoped repasses are consumed first and global
    // invalidation is only the fallback (see `schedule_layout_repass`).
    layout_invalidated: AtomicBool,
    // Density (logical px per dp) stored as raw f32 bits so it fits in an
    // atomic; initialized to 1.0 in `new`.
    density_bits: AtomicU32,
}
19
20impl RenderState {
21    fn new() -> Self {
22        Self {
23            layout_repasses: Mutex::new(LayoutRepassManager::new()),
24            draw_repasses: Mutex::new(DrawRepassManager::new()),
25            render_invalidated: AtomicBool::new(false),
26            pointer_invalidated: AtomicBool::new(false),
27            focus_invalidated: AtomicBool::new(false),
28            layout_invalidated: AtomicBool::new(false),
29            density_bits: AtomicU32::new(f32::to_bits(1.0)),
30        }
31    }
32}
33
34#[cfg(not(any(test, feature = "test-helpers")))]
35fn with_render_state<R>(f: impl FnOnce(&RenderState) -> R) -> R {
36    static STATE: OnceLock<RenderState> = OnceLock::new();
37    f(STATE.get_or_init(RenderState::new))
38}
39
#[cfg(any(test, feature = "test-helpers"))]
fn with_render_state<R>(f: impl FnOnce(&RenderState) -> R) -> R {
    // Test builds isolate state per thread so parallel tests cannot observe
    // each other's invalidation flags.
    std::thread_local! {
        static STATE: RenderState = RenderState::new();
    }
    STATE.with(|state| f(state))
}
47
/// Manages scoped layout invalidations for specific nodes.
///
/// Similar to PointerDispatchManager, this tracks which specific nodes
/// need layout invalidation rather than forcing a global invalidation.
struct LayoutRepassManager {
    // Nodes awaiting a layout repass; set semantics make repeated
    // scheduling of the same node idempotent.
    dirty_nodes: HashSet<NodeId>,
}
55
56impl LayoutRepassManager {
57    fn new() -> Self {
58        Self {
59            dirty_nodes: HashSet::new(),
60        }
61    }
62
63    fn schedule_repass(&mut self, node_id: NodeId) {
64        self.dirty_nodes.insert(node_id);
65    }
66
67    fn has_pending_repass(&self) -> bool {
68        !self.dirty_nodes.is_empty()
69    }
70
71    fn take_dirty_nodes(&mut self) -> Vec<NodeId> {
72        self.dirty_nodes.drain().collect()
73    }
74}
75
/// Tracks draw-only invalidations so render data can be refreshed without layout.
struct DrawRepassManager {
    // Nodes whose draw data is stale; set semantics de-duplicate requests.
    dirty_nodes: HashSet<NodeId>,
}
80
81impl DrawRepassManager {
82    fn new() -> Self {
83        Self {
84            dirty_nodes: HashSet::new(),
85        }
86    }
87
88    fn schedule_repass(&mut self, node_id: NodeId) {
89        self.dirty_nodes.insert(node_id);
90    }
91
92    fn has_pending_repass(&self) -> bool {
93        !self.dirty_nodes.is_empty()
94    }
95
96    fn take_dirty_nodes(&mut self) -> Vec<NodeId> {
97        self.dirty_nodes.drain().collect()
98    }
99}
100
101/// Schedules a layout repass for a specific node.
102///
103/// **This is the preferred way to invalidate layout for local changes** (e.g., scroll, single-node mutations).
104///
105/// The app shell will call `take_layout_repass_nodes()` and bubble dirty flags up the tree
106/// via `bubble_layout_dirty`. This gives you **O(subtree) performance** - only the affected
107/// subtree is remeasured, and layout caches for other parts of the app remain valid.
108///
109/// # Implementation Note
110///
111/// This sets the `LAYOUT_INVALIDATED` flag to signal the app shell there's work to do,
112/// but the flag alone does NOT trigger global cache invalidation. The app shell checks
113/// `take_layout_repass_nodes()` first and processes scoped repasses. Global cache invalidation
114/// only happens if the flag is set AND there are no scoped repasses (a rare fallback case).
115///
116/// # For Global Invalidation
117///
118/// For rare global events (window resize, global scale changes), use `request_layout_invalidation()` instead.
119pub fn schedule_layout_repass(node_id: NodeId) {
120    with_render_state(|state| {
121        state
122            .layout_repasses
123            .lock()
124            .expect("layout repass manager poisoned")
125            .schedule_repass(node_id);
126        state.layout_invalidated.store(true, Ordering::Relaxed);
127    });
128    // Set the layout-invalidated flag so the app shell knows to process repasses.
129    // The app shell will check take_layout_repass_nodes() first (scoped path),
130    // and only falls back to global invalidation if the flag is set without any repass nodes.
131    // Also request render invalidation so the frame is actually drawn.
132    // Without this, programmatic scrolls (e.g., scroll_to_item) wouldn't trigger a redraw
133    // until the next user interaction caused a frame request.
134    request_render_invalidation();
135}
136
137/// Schedules a draw-only repass for a specific node.
138///
139/// This ensures draw/pointer data stays in sync when modifier updates do not
140/// require a layout pass (e.g., draw-only modifier changes).
141pub fn schedule_draw_repass(node_id: NodeId) {
142    with_render_state(|state| {
143        state
144            .draw_repasses
145            .lock()
146            .expect("draw repass manager poisoned")
147            .schedule_repass(node_id);
148    });
149}
150
151/// Returns true if any draw repasses are pending.
152pub fn has_pending_draw_repasses() -> bool {
153    with_render_state(|state| {
154        state
155            .draw_repasses
156            .lock()
157            .expect("draw repass manager poisoned")
158            .has_pending_repass()
159    })
160}
161
162/// Takes all pending draw repass node IDs.
163pub fn take_draw_repass_nodes() -> Vec<NodeId> {
164    with_render_state(|state| {
165        state
166            .draw_repasses
167            .lock()
168            .expect("draw repass manager poisoned")
169            .take_dirty_nodes()
170    })
171}
172
173/// Returns true if any layout repasses are pending.
174pub fn has_pending_layout_repasses() -> bool {
175    with_render_state(|state| {
176        state
177            .layout_repasses
178            .lock()
179            .expect("layout repass manager poisoned")
180            .has_pending_repass()
181    })
182}
183
184/// Takes all pending layout repass node IDs.
185///
186/// The caller should iterate over these and call `bubble_layout_dirty` for each.
187pub fn take_layout_repass_nodes() -> Vec<NodeId> {
188    with_render_state(|state| {
189        state
190            .layout_repasses
191            .lock()
192            .expect("layout repass manager poisoned")
193            .take_dirty_nodes()
194    })
195}
196
197/// Returns the current density scale factor (logical px per dp).
198pub fn current_density() -> f32 {
199    with_render_state(|state| f32::from_bits(state.density_bits.load(Ordering::Relaxed)))
200}
201
202/// Updates the current density scale factor.
203///
204/// This triggers a global layout invalidation when the value changes because
205/// density impacts layout, text measurement, and input thresholds.
206pub fn set_density(density: f32) {
207    let normalized = if density.is_finite() && density > 0.0 {
208        density
209    } else {
210        1.0
211    };
212    let new_bits = normalized.to_bits();
213    with_render_state(|state| {
214        let old_bits = state.density_bits.swap(new_bits, Ordering::Relaxed);
215        if old_bits != new_bits {
216            state.layout_invalidated.store(true, Ordering::Relaxed);
217        }
218    });
219}
220
221/// Requests that the renderer rebuild the current scene.
222pub fn request_render_invalidation() {
223    with_render_state(|state| state.render_invalidated.store(true, Ordering::Relaxed));
224}
225
226/// Returns true if a render invalidation was pending and clears the flag.
227pub fn take_render_invalidation() -> bool {
228    with_render_state(|state| state.render_invalidated.swap(false, Ordering::Relaxed))
229}
230
231/// Returns true if a render invalidation is pending without clearing it.
232pub fn peek_render_invalidation() -> bool {
233    with_render_state(|state| state.render_invalidated.load(Ordering::Relaxed))
234}
235
236/// Requests a new pointer-input pass without touching layout or draw dirties.
237pub fn request_pointer_invalidation() {
238    with_render_state(|state| state.pointer_invalidated.store(true, Ordering::Relaxed));
239}
240
241/// Returns true if a pointer invalidation was pending and clears the flag.
242pub fn take_pointer_invalidation() -> bool {
243    with_render_state(|state| state.pointer_invalidated.swap(false, Ordering::Relaxed))
244}
245
246/// Returns true if a pointer invalidation is pending without clearing it.
247pub fn peek_pointer_invalidation() -> bool {
248    with_render_state(|state| state.pointer_invalidated.load(Ordering::Relaxed))
249}
250
251/// Requests a focus recomposition without affecting layout/draw dirties.
252pub fn request_focus_invalidation() {
253    with_render_state(|state| state.focus_invalidated.store(true, Ordering::Relaxed));
254}
255
256/// Returns true if a focus invalidation was pending and clears the flag.
257pub fn take_focus_invalidation() -> bool {
258    with_render_state(|state| state.focus_invalidated.swap(false, Ordering::Relaxed))
259}
260
261/// Returns true if a focus invalidation is pending without clearing it.
262pub fn peek_focus_invalidation() -> bool {
263    with_render_state(|state| state.focus_invalidated.load(Ordering::Relaxed))
264}
265
266/// Requests a **global** layout re-run.
267///
268/// # ⚠️ WARNING: Extremely Expensive - O(entire app size)
269///
270/// This triggers internal cache invalidation that forces **every node** in the app
271/// to re-measure, even if nothing changed. This is a performance footgun!
272///
273/// ## Valid Use Cases (rare!)
274///
275/// Only use this for **true global changes** that affect layout computation everywhere:
276/// - Window/viewport resize
277/// - Global font scale or density changes
278/// - System-wide theme changes that affect layout
279/// - Debug toggles that change layout behavior globally
280///
281/// ## For Local Changes - DO NOT USE THIS
282///
283/// **If you're invalidating layout for scroll, a single widget update, or any local change,
284/// you MUST use the scoped repass mechanism instead:**
285///
286/// ```text
287/// cranpose_ui::schedule_layout_repass(node_id);
288/// ```
289///
290/// Scoped repasses give you O(subtree) performance instead of O(app), and they don't
291/// invalidate caches across the entire app.
292pub fn request_layout_invalidation() {
293    with_render_state(|state| state.layout_invalidated.store(true, Ordering::Relaxed));
294}
295
296/// Returns true if a layout invalidation was pending and clears the flag.
297pub fn take_layout_invalidation() -> bool {
298    with_render_state(|state| state.layout_invalidated.swap(false, Ordering::Relaxed))
299}
300
301/// Returns true if a layout invalidation is pending without clearing it.
302pub fn peek_layout_invalidation() -> bool {
303    with_render_state(|state| state.layout_invalidated.load(Ordering::Relaxed))
304}
305
/// Drains every pending repass and flag and resets density to 1.0 so tests
/// start from a known-clean state.
#[cfg(any(test, feature = "test-helpers"))]
#[doc(hidden)]
pub fn reset_render_state_for_tests() {
    let _ = take_draw_repass_nodes();
    let _ = take_layout_repass_nodes();
    let _ = take_render_invalidation();
    let _ = take_pointer_invalidation();
    let _ = take_focus_invalidation();
    let _ = take_layout_invalidation();
    set_density(1.0);
    // set_density re-raises the layout flag when the stored density actually
    // changed, so the flag must be drained a second time afterwards.
    let _ = take_layout_invalidation();
}
318
/// Serializes tests that touch the shared render state. A poisoned lock is
/// still handed out because the guarded value is just `()`.
#[cfg(test)]
pub(crate) fn render_state_test_guard() -> std::sync::MutexGuard<'static, ()> {
    static TEST_LOCK: OnceLock<Mutex<()>> = OnceLock::new();
    TEST_LOCK
        .get_or_init(|| Mutex::new(()))
        .lock()
        .unwrap_or_else(std::sync::PoisonError::into_inner)
}
327
#[cfg(test)]
mod tests {
    use super::*;
    use std::sync::{mpsc, Arc};

    // Verifies that a RenderState behind an Arc acts as shared state across
    // threads: flags and density stored on a worker thread are visible on the
    // main thread, and clearing them on the main thread is observed too.
    #[test]
    fn invalidation_flags_are_shared_across_threads() {
        let state = Arc::new(RenderState::new());
        let (tx, rx) = mpsc::channel();
        let worker_state = Arc::clone(&state);

        let handle = std::thread::spawn(move || {
            // Worker: raise every flag and bump density to 2.0, then signal.
            worker_state
                .render_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .pointer_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .focus_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .layout_invalidated
                .store(true, Ordering::Relaxed);
            worker_state
                .density_bits
                .store(f32::to_bits(2.0), Ordering::Relaxed);
            tx.send(()).expect("signal invalidation setup");

            // Return the density as the worker sees it after its own store.
            f32::from_bits(worker_state.density_bits.load(Ordering::Relaxed))
        });

        // Block until the worker has finished its stores, then read them.
        rx.recv().expect("wait for worker invalidation setup");
        assert!(state.render_invalidated.load(Ordering::Relaxed));
        assert!(state.pointer_invalidated.load(Ordering::Relaxed));
        assert!(state.focus_invalidated.load(Ordering::Relaxed));
        assert!(state.layout_invalidated.load(Ordering::Relaxed));
        assert_eq!(
            f32::from_bits(state.density_bits.load(Ordering::Relaxed)),
            2.0
        );
        // swap(false) both clears each flag and confirms it was set.
        assert!(state.render_invalidated.swap(false, Ordering::Relaxed));
        assert!(state.pointer_invalidated.swap(false, Ordering::Relaxed));
        assert!(state.focus_invalidated.swap(false, Ordering::Relaxed));
        assert!(state.layout_invalidated.swap(false, Ordering::Relaxed));

        let density = handle.join().expect("worker invalidation snapshot");
        assert_eq!(density, 2.0);
        // The main thread's clears above must be the final observed values.
        assert!(!state.render_invalidated.load(Ordering::Relaxed));
        assert!(!state.pointer_invalidated.load(Ordering::Relaxed));
        assert!(!state.focus_invalidated.load(Ordering::Relaxed));
        assert!(!state.layout_invalidated.load(Ordering::Relaxed));
    }
}
381}