//! `fret_ui/tree/ui_tree_invalidation_walk/mark.rs`
//!
//! Invalidation-marking walks for [`UiTree`]: propagate invalidation flags
//! from a node to its ancestors, maintaining debug statistics, view-cache
//! dirtiness, and subtree layout-dirty aggregation counts along the way.
use super::super::*;

3impl<H: UiHost> UiTree<H> {
4    pub(in crate::tree) fn mark_invalidation(&mut self, node: NodeId, inv: Invalidation) {
5        self.mark_invalidation_with_source(node, inv, UiDebugInvalidationSource::Other);
6    }
7
8    pub(in crate::tree) fn invalidation_marks_view_dirty(
9        source: UiDebugInvalidationSource,
10        inv: Invalidation,
11        detail: UiDebugInvalidationDetail,
12    ) -> bool {
13        matches!(
14            source,
15            UiDebugInvalidationSource::Notify
16                | UiDebugInvalidationSource::ModelChange
17                | UiDebugInvalidationSource::GlobalChange
18        ) || matches!(detail, UiDebugInvalidationDetail::HoverRegionEdge)
19            || (inv != Invalidation::Paint
20                && matches!(
21                    detail,
22                    UiDebugInvalidationDetail::ScrollHandleLayout
23                        | UiDebugInvalidationDetail::ScrollHandleWindowUpdate
24                        | UiDebugInvalidationDetail::ScrollHandleScrollToItemWindowUpdate
25                        | UiDebugInvalidationDetail::ScrollHandleViewportResizeWindowUpdate
26                        | UiDebugInvalidationDetail::ScrollHandleItemsRevisionWindowUpdate
27                        | UiDebugInvalidationDetail::ScrollHandlePrefetchWindowUpdate
28                ))
29    }
30
31    fn record_invalidation_walk_call(&mut self, source: UiDebugInvalidationSource) {
32        if !self.debug_enabled {
33            return;
34        }
35        self.debug_stats.invalidation_walk_calls =
36            self.debug_stats.invalidation_walk_calls.saturating_add(1);
37        match source {
38            UiDebugInvalidationSource::ModelChange => {
39                self.debug_stats.invalidation_walk_calls_model_change = self
40                    .debug_stats
41                    .invalidation_walk_calls_model_change
42                    .saturating_add(1);
43            }
44            UiDebugInvalidationSource::GlobalChange => {
45                self.debug_stats.invalidation_walk_calls_global_change = self
46                    .debug_stats
47                    .invalidation_walk_calls_global_change
48                    .saturating_add(1);
49            }
50            UiDebugInvalidationSource::Notify => {
51                self.debug_stats.invalidation_walk_calls_other = self
52                    .debug_stats
53                    .invalidation_walk_calls_other
54                    .saturating_add(1);
55            }
56            UiDebugInvalidationSource::Hover => {
57                self.debug_stats.invalidation_walk_calls_hover = self
58                    .debug_stats
59                    .invalidation_walk_calls_hover
60                    .saturating_add(1);
61            }
62            UiDebugInvalidationSource::Focus => {
63                self.debug_stats.invalidation_walk_calls_focus = self
64                    .debug_stats
65                    .invalidation_walk_calls_focus
66                    .saturating_add(1);
67            }
68            UiDebugInvalidationSource::Other => {
69                self.debug_stats.invalidation_walk_calls_other = self
70                    .debug_stats
71                    .invalidation_walk_calls_other
72                    .saturating_add(1);
73            }
74        }
75    }
76
77    fn record_invalidation_walk_node(&mut self, source: UiDebugInvalidationSource) {
78        if !self.debug_enabled {
79            return;
80        }
81        self.debug_stats.invalidation_walk_nodes =
82            self.debug_stats.invalidation_walk_nodes.saturating_add(1);
83        match source {
84            UiDebugInvalidationSource::ModelChange => {
85                self.debug_stats.invalidation_walk_nodes_model_change = self
86                    .debug_stats
87                    .invalidation_walk_nodes_model_change
88                    .saturating_add(1);
89            }
90            UiDebugInvalidationSource::GlobalChange => {
91                self.debug_stats.invalidation_walk_nodes_global_change = self
92                    .debug_stats
93                    .invalidation_walk_nodes_global_change
94                    .saturating_add(1);
95            }
96            UiDebugInvalidationSource::Notify => {
97                self.debug_stats.invalidation_walk_nodes_other = self
98                    .debug_stats
99                    .invalidation_walk_nodes_other
100                    .saturating_add(1);
101            }
102            UiDebugInvalidationSource::Hover => {
103                self.debug_stats.invalidation_walk_nodes_hover = self
104                    .debug_stats
105                    .invalidation_walk_nodes_hover
106                    .saturating_add(1);
107            }
108            UiDebugInvalidationSource::Focus => {
109                self.debug_stats.invalidation_walk_nodes_focus = self
110                    .debug_stats
111                    .invalidation_walk_nodes_focus
112                    .saturating_add(1);
113            }
114            UiDebugInvalidationSource::Other => {
115                self.debug_stats.invalidation_walk_nodes_other = self
116                    .debug_stats
117                    .invalidation_walk_nodes_other
118                    .saturating_add(1);
119            }
120        }
121    }
122
123    pub(in crate::tree) fn mark_invalidation_with_source(
124        &mut self,
125        node: NodeId,
126        inv: Invalidation,
127        source: UiDebugInvalidationSource,
128    ) {
129        let detail = UiDebugInvalidationDetail::from_source(source);
130        self.mark_invalidation_with_detail(node, inv, source, detail);
131    }
132
    /// Core invalidation walk: marks `node` with `inv` and propagates the
    /// flags up the parent chain, maintaining per-node invalidation state,
    /// the layout-invalidation counter, view-cache dirtiness, and (when
    /// aggregation is enabled) each ancestor's `subtree_layout_dirty_count`.
    /// When debugging is enabled, the walk is also recorded in
    /// `debug_invalidation_walks` and the debug statistics.
    fn mark_invalidation_with_detail(
        &mut self,
        node: NodeId,
        inv: Invalidation,
        source: UiDebugInvalidationSource,
        detail: UiDebugInvalidationDetail,
    ) {
        // Feature toggles are sampled once; they are treated as invariant for
        // the duration of the walk.
        let stop_at_view_cache = self.view_cache_active();
        let agg_enabled = self.subtree_layout_dirty_aggregation_enabled();
        self.record_invalidation_walk_call(source);
        let mut current = Some(node);
        // First enabled cache root the walk truncated at, if any; used for the
        // debug record and the nested-cache-root fixup loop at the end.
        let mut hit_cache_root: Option<NodeId> = None;
        let root_element = self.nodes.get(node).and_then(|n| n.element);
        let mut walked_nodes: u32 = 0;
        // While true, invalidation flags are still being propagated. Once it
        // flips to false the loop only continues to flush any outstanding
        // aggregation delta up the ancestor chain.
        let mut invalidation_active = true;
        // Net layout-dirty change still owed to each remaining ancestor's
        // `subtree_layout_dirty_count` (aggregation mode only).
        let mut pending_layout_dirty_delta: i32 = 0;
        let mut agg_walk_len: u32 = 0;
        while let Some(id) = current {
            if invalidation_active && self.nodes.contains_key(id) {
                self.record_invalidation_walk_node(source);
                walked_nodes = walked_nodes.saturating_add(1);
            }
            let mut did_stop = false;
            let mut mark_dirty = false;
            let mut mark_dirty_for_contained_layout = false;
            // Deferred (prev, next) invalidation-flag pair; applied after the
            // mutable node borrow ends.
            let mut counter_update: Option<(InvalidationFlags, InvalidationFlags)> = None;
            // +1/-1/0 change in this node's own layout-dirty bit.
            let mut self_delta: i32 = 0;
            let mut rebuild_subtree_layout_dirty: bool = false;
            let next_parent = if let Some(n) = self.nodes.get_mut(id) {
                let next_parent = n.parent;
                if invalidation_active {
                    let prev = n.invalidation;
                    let layout_before = n.invalidation.layout;
                    Self::mark_node_invalidation_state(n, inv);
                    let layout_after = n.invalidation.layout;
                    record_layout_invalidation_transition(
                        &mut self.layout_invalidations_count,
                        layout_before,
                        layout_after,
                    );
                    counter_update = Some((prev, n.invalidation));
                    self_delta = match (layout_before, layout_after) {
                        (false, true) => 1,
                        (true, false) => -1,
                        _ => 0,
                    };

                    // Paint-only invalidations may always truncate at a cache
                    // root; layout-affecting ones only under the conditions
                    // below.
                    let can_truncate_at_cache_root = inv == Invalidation::Paint
                        || (n.view_cache.contained_layout
                            && n.view_cache.layout_definite
                            && n.bounds.size != Size::default())
                        // For auto-sized cache roots, allow descendant invalidations to truncate at
                        // the first cache boundary we hit. A separate repair step
                        // (`propagate_auto_sized_view_cache_root_invalidations`) will propagate a
                        // single invalidation from the cache root to its ancestors so the root can be
                        // placed before running contained relayouts.
                        //
                        // Importantly, do *not* truncate when the invalidation originates at the
                        // cache root itself (e.g. the repair step), so it can still reach ancestors.
                        || (n.view_cache.contained_layout
                            && !n.view_cache.layout_definite
                            && id != node);
                    if stop_at_view_cache && n.view_cache.enabled && can_truncate_at_cache_root {
                        if self.debug_enabled {
                            self.debug_stats.view_cache_invalidation_truncations = self
                                .debug_stats
                                .view_cache_invalidation_truncations
                                .saturating_add(1);
                        }
                        hit_cache_root = Some(id);
                        did_stop = true;
                        mark_dirty_for_contained_layout =
                            n.view_cache.contained_layout && n.invalidation.layout;
                        if Self::invalidation_marks_view_dirty(source, inv, detail) {
                            n.view_cache_needs_rerender = true;
                            mark_dirty = true;
                        }
                    }
                }

                if agg_enabled {
                    // Fold the delta carried from descendants together with
                    // this node's own transition, then apply it to the
                    // aggregate count.
                    let apply_delta = pending_layout_dirty_delta.saturating_add(self_delta);
                    if apply_delta != 0 {
                        let underflow =
                            super::super::ui_tree_subtree_layout_dirty::apply_i32_delta_to_u32(
                                &mut n.subtree_layout_dirty_count,
                                apply_delta,
                            );
                        if underflow {
                            // Counts are inconsistent; schedule a full repair
                            // from this node rather than trusting the delta.
                            rebuild_subtree_layout_dirty = true;
                            tracing::error!(
                                node = ?id,
                                element = ?n.element,
                                stored = n.subtree_layout_dirty_count,
                                delta = apply_delta,
                                "subtree layout dirty count underflow during invalidation walk"
                            );
                        }
                    }
                }
                next_parent
            } else {
                // Node is no longer in the tree; nothing further to walk.
                break;
            };

            // Counter updates are deferred to this point so that `self` is no
            // longer mutably borrowed through the node entry.
            if let Some((prev, next)) = counter_update {
                self.update_invalidation_counters(prev, next);
            }

            if rebuild_subtree_layout_dirty {
                self.repair_subtree_layout_dirty_counts_from(id);
            }

            if agg_enabled {
                agg_walk_len = agg_walk_len.saturating_add(1);
                // After a repair the stored counts are authoritative again, so
                // any remaining delta is dropped.
                pending_layout_dirty_delta = if rebuild_subtree_layout_dirty {
                    0
                } else {
                    pending_layout_dirty_delta.saturating_add(self_delta)
                };
            }

            if did_stop {
                if mark_dirty || mark_dirty_for_contained_layout {
                    self.mark_cache_root_dirty(id, source, detail);
                }
                invalidation_active = false;
            }
            // Stop the walk entirely once invalidation propagation is done and
            // no aggregation delta remains to push further up.
            if !invalidation_active && (!agg_enabled || pending_layout_dirty_delta == 0) {
                break;
            }
            current = next_parent;
        }

        if agg_enabled && self.debug_enabled && agg_walk_len > 0 && pending_layout_dirty_delta != 0
        {
            self.debug_stats.layout_subtree_dirty_agg_updates = self
                .debug_stats
                .layout_subtree_dirty_agg_updates
                .saturating_add(1);
            self.debug_stats.layout_subtree_dirty_agg_nodes_touched = self
                .debug_stats
                .layout_subtree_dirty_agg_nodes_touched
                .saturating_add(agg_walk_len);
            self.debug_stats.layout_subtree_dirty_agg_max_parent_walk = self
                .debug_stats
                .layout_subtree_dirty_agg_max_parent_walk
                .max(agg_walk_len);
        }

        if self.debug_enabled {
            self.debug_invalidation_walks.push(UiDebugInvalidationWalk {
                root: node,
                root_element,
                inv,
                source,
                detail,
                walked_nodes,
                truncated_at: hit_cache_root,
            });
        }

        // Nested cache-root correctness: if a descendant cache root is invalidated, any ancestor
        // cache roots must also be invalidated for the same categories so they cannot replay stale
        // recorded ranges that include the old descendant output.
        if stop_at_view_cache && let Some(cache_root) = hit_cache_root {
            let mut parent = self.nodes.get(cache_root).and_then(|n| n.parent);
            while let Some(id) = parent {
                let next_parent = self.nodes.get(id).and_then(|n| n.parent);
                let mut mark_dirty = false;
                let mut mark_dirty_for_contained_layout = false;
                let mut counter_update: Option<(InvalidationFlags, InvalidationFlags)> = None;
                let mut layout_transition: Option<(NodeId, bool, bool)> = None;
                // Only ancestors that are themselves cache roots need the
                // extra marking; plain ancestors are skipped.
                if let Some(n) = self.nodes.get_mut(id)
                    && n.view_cache.enabled
                {
                    let prev = n.invalidation;
                    let layout_before = n.invalidation.layout;
                    Self::mark_node_invalidation_state(n, inv);
                    let layout_after = n.invalidation.layout;
                    record_layout_invalidation_transition(
                        &mut self.layout_invalidations_count,
                        layout_before,
                        layout_after,
                    );
                    layout_transition = Some((id, layout_before, layout_after));
                    counter_update = Some((prev, n.invalidation));
                    mark_dirty_for_contained_layout =
                        n.view_cache.contained_layout && n.invalidation.layout;
                    if Self::invalidation_marks_view_dirty(source, inv, detail) {
                        n.view_cache_needs_rerender = true;
                        mark_dirty = true;
                    }
                }
                if let Some((id, before, after)) = layout_transition {
                    self.note_layout_invalidation_transition_for_subtree_aggregation(
                        id, before, after,
                    );
                }
                if let Some((prev, next)) = counter_update {
                    self.update_invalidation_counters(prev, next);
                }
                if mark_dirty || mark_dirty_for_contained_layout {
                    self.mark_cache_root_dirty(id, source, detail);
                }
                parent = next_parent;
            }
        }
    }
342
343    fn invalidation_mask(inv: Invalidation) -> u8 {
344        const PAINT: u8 = 1 << 0;
345        const LAYOUT: u8 = 1 << 1;
346        const HIT_TEST: u8 = 1 << 2;
347        match inv {
348            Invalidation::Paint => PAINT,
349            Invalidation::Layout => PAINT | LAYOUT,
350            Invalidation::HitTest => PAINT | LAYOUT | HIT_TEST,
351            Invalidation::HitTestOnly => PAINT | HIT_TEST,
352        }
353    }
354
355    pub(in crate::tree) fn mark_invalidation_dedup_with_source<V: InvalidationVisited>(
356        &mut self,
357        node: NodeId,
358        inv: Invalidation,
359        visited: &mut V,
360        source: UiDebugInvalidationSource,
361    ) {
362        let detail = UiDebugInvalidationDetail::from_source(source);
363        self.mark_invalidation_dedup_with_detail(node, inv, visited, source, detail);
364    }
365
    /// Deduplicated variant of the invalidation walk: `visited` records, per
    /// node, which invalidation categories (see `invalidation_mask`) have
    /// already been applied this pass, so redundant walks can stop early.
    /// `Notify`-sourced invalidations bypass the dedup checks entirely.
    pub(in crate::tree) fn mark_invalidation_dedup_with_detail<V: InvalidationVisited>(
        &mut self,
        node: NodeId,
        inv: Invalidation,
        visited: &mut V,
        source: UiDebugInvalidationSource,
        detail: UiDebugInvalidationDetail,
    ) {
        let stop_at_view_cache = self.view_cache_active();
        let agg_enabled = self.subtree_layout_dirty_aggregation_enabled();
        // Bit set this walk must ensure on every visited node.
        let needed = Self::invalidation_mask(inv);
        // Cheap early-out: the start node already carries every needed bit.
        if source != UiDebugInvalidationSource::Notify && (visited.mask(node) & needed) == needed {
            return;
        }
        self.record_invalidation_walk_call(source);

        let mut current = Some(node);
        // First enabled cache root the walk truncated at, if any.
        let mut hit_cache_root: Option<NodeId> = None;
        let root_element = self.nodes.get(node).and_then(|n| n.element);
        let mut walked_nodes: u32 = 0;
        // While true, invalidation flags are still being propagated; afterwards
        // the loop only continues to flush the aggregation delta.
        let mut invalidation_active = true;
        // Net layout-dirty change still owed to remaining ancestors'
        // `subtree_layout_dirty_count` (aggregation mode only).
        let mut pending_layout_dirty_delta: i32 = 0;
        let mut agg_walk_len: u32 = 0;
        while let Some(id) = current {
            let already = visited.mask(id);
            // Dedup cut-off: this ancestor (and, by construction of `visited`,
            // everything above it) already has the needed bits — unless the
            // walk would still need to mark a cache root's view dirty.
            if invalidation_active
                && source != UiDebugInvalidationSource::Notify
                && (already & needed) == needed
                && !(stop_at_view_cache && Self::invalidation_marks_view_dirty(source, inv, detail))
            {
                invalidation_active = false;
                if !agg_enabled || pending_layout_dirty_delta == 0 {
                    break;
                }
            }

            if invalidation_active && self.nodes.contains_key(id) {
                self.record_invalidation_walk_node(source);
                walked_nodes = walked_nodes.saturating_add(1);
            }
            let mut did_stop = false;
            let mut mark_dirty = false;
            let mut mark_dirty_for_contained_layout = false;
            // +1/-1/0 change in this node's own layout-dirty bit.
            let mut self_delta: i32 = 0;
            let mut rebuild_subtree_layout_dirty: bool = false;
            let next_parent = if let Some(n) = self.nodes.get_mut(id) {
                let next_parent = n.parent;
                let mut counter_update: Option<(InvalidationFlags, InvalidationFlags)> = None;
                // Only touch the node's flags if it still lacks a needed bit
                // (Notify always re-marks).
                if invalidation_active
                    && (source == UiDebugInvalidationSource::Notify || (already & needed) != needed)
                {
                    let prev = n.invalidation;
                    let layout_before = n.invalidation.layout;
                    Self::mark_node_invalidation_state(n, inv);
                    record_layout_invalidation_transition(
                        &mut self.layout_invalidations_count,
                        layout_before,
                        n.invalidation.layout,
                    );
                    visited.set_mask(id, already | needed);
                    counter_update = Some((prev, n.invalidation));
                    self_delta = match (layout_before, n.invalidation.layout) {
                        (false, true) => 1,
                        (true, false) => -1,
                        _ => 0,
                    };
                }

                if invalidation_active {
                    // Same truncation rules as `mark_invalidation_with_detail`:
                    // paint-only always may truncate; layout-affecting only for
                    // contained-layout cache roots (definite with real bounds,
                    // or auto-sized when the walk did not start at the root).
                    let can_truncate_at_cache_root = inv == Invalidation::Paint
                        || (n.view_cache.contained_layout
                            && n.view_cache.layout_definite
                            && n.bounds.size != Size::default())
                        || (n.view_cache.contained_layout
                            && !n.view_cache.layout_definite
                            && id != node);
                    if stop_at_view_cache && n.view_cache.enabled && can_truncate_at_cache_root {
                        if self.debug_enabled {
                            self.debug_stats.view_cache_invalidation_truncations = self
                                .debug_stats
                                .view_cache_invalidation_truncations
                                .saturating_add(1);
                        }
                        mark_dirty_for_contained_layout =
                            n.view_cache.contained_layout && n.invalidation.layout;
                        if Self::invalidation_marks_view_dirty(source, inv, detail) {
                            n.view_cache_needs_rerender = true;
                            mark_dirty = true;
                        }
                        hit_cache_root = Some(id);
                        did_stop = true;
                    }
                }

                if agg_enabled {
                    // Fold the carried delta with this node's own transition
                    // and apply it to the aggregate count.
                    let apply_delta = pending_layout_dirty_delta.saturating_add(self_delta);
                    if apply_delta != 0 {
                        let underflow =
                            super::super::ui_tree_subtree_layout_dirty::apply_i32_delta_to_u32(
                                &mut n.subtree_layout_dirty_count,
                                apply_delta,
                            );
                        if underflow {
                            // Counts are inconsistent; schedule a full repair
                            // from this node instead of trusting the delta.
                            rebuild_subtree_layout_dirty = true;
                            tracing::error!(
                                node = ?id,
                                element = ?n.element,
                                stored = n.subtree_layout_dirty_count,
                                delta = apply_delta,
                                "subtree layout dirty count underflow during invalidation walk"
                            );
                        }
                    }
                }

                if let Some((prev, next)) = counter_update {
                    self.update_invalidation_counters(prev, next);
                }
                next_parent
            } else {
                // Node is no longer in the tree; nothing further to walk.
                break;
            };

            if rebuild_subtree_layout_dirty {
                self.repair_subtree_layout_dirty_counts_from(id);
            }

            if did_stop {
                if mark_dirty || mark_dirty_for_contained_layout {
                    self.mark_cache_root_dirty(id, source, detail);
                }
                invalidation_active = false;
            }
            if agg_enabled {
                agg_walk_len = agg_walk_len.saturating_add(1);
                // After a repair the stored counts are authoritative; drop the
                // remaining delta.
                pending_layout_dirty_delta = if rebuild_subtree_layout_dirty {
                    0
                } else {
                    pending_layout_dirty_delta.saturating_add(self_delta)
                };
            }
            // Stop entirely once invalidation is done and no aggregation delta
            // remains to push further up.
            if !invalidation_active && (!agg_enabled || pending_layout_dirty_delta == 0) {
                break;
            }
            current = next_parent;
        }

        if agg_enabled && self.debug_enabled && agg_walk_len > 0 && pending_layout_dirty_delta != 0
        {
            self.debug_stats.layout_subtree_dirty_agg_updates = self
                .debug_stats
                .layout_subtree_dirty_agg_updates
                .saturating_add(1);
            self.debug_stats.layout_subtree_dirty_agg_nodes_touched = self
                .debug_stats
                .layout_subtree_dirty_agg_nodes_touched
                .saturating_add(agg_walk_len);
            self.debug_stats.layout_subtree_dirty_agg_max_parent_walk = self
                .debug_stats
                .layout_subtree_dirty_agg_max_parent_walk
                .max(agg_walk_len);
        }

        if self.debug_enabled {
            self.debug_invalidation_walks.push(UiDebugInvalidationWalk {
                root: node,
                root_element,
                inv,
                source,
                detail,
                walked_nodes,
                truncated_at: hit_cache_root,
            });
        }

        // Nested cache-root correctness: if a descendant cache root is invalidated, any ancestor
        // cache roots must also be invalidated for the same categories so they cannot replay stale
        // recorded ranges that include the old descendant output.
        if stop_at_view_cache && let Some(cache_root) = hit_cache_root {
            let mut parent = self.nodes.get(cache_root).and_then(|n| n.parent);
            while let Some(id) = parent {
                let next_parent = self.nodes.get(id).and_then(|n| n.parent);
                let already = visited.mask(id);
                // Only ancestors that are themselves cache roots need marking.
                if self.nodes.get(id).is_some_and(|n| n.view_cache.enabled) {
                    let mut mark_dirty = false;
                    let mut mark_dirty_for_contained_layout = false;
                    let mut counter_update: Option<(InvalidationFlags, InvalidationFlags)> = None;
                    let mut layout_transition: Option<(NodeId, bool, bool)> = None;
                    if let Some(n) = self.nodes.get_mut(id) {
                        // View dirtiness is applied regardless of dedup state.
                        if Self::invalidation_marks_view_dirty(source, inv, detail) {
                            n.view_cache_needs_rerender = true;
                            mark_dirty = true;
                        }
                        // Flag updates are still deduplicated.
                        if (already & needed) != needed {
                            let prev = n.invalidation;
                            let layout_before = n.invalidation.layout;
                            Self::mark_node_invalidation_state(n, inv);
                            let layout_after = n.invalidation.layout;
                            record_layout_invalidation_transition(
                                &mut self.layout_invalidations_count,
                                layout_before,
                                layout_after,
                            );
                            layout_transition = Some((id, layout_before, layout_after));
                            counter_update = Some((prev, n.invalidation));
                            mark_dirty_for_contained_layout =
                                n.view_cache.contained_layout && n.invalidation.layout;
                        }
                    }
                    if let Some((id, before, after)) = layout_transition {
                        self.note_layout_invalidation_transition_for_subtree_aggregation(
                            id, before, after,
                        );
                    }
                    if let Some((prev, next)) = counter_update {
                        self.update_invalidation_counters(prev, next);
                    }
                    if mark_dirty || mark_dirty_for_contained_layout {
                        self.mark_cache_root_dirty(id, source, detail);
                    }
                    visited.set_mask(id, already | needed);
                }
                parent = next_parent;
            }
        }
    }
592
593    pub fn invalidate(&mut self, node: NodeId, inv: Invalidation) {
594        self.mark_invalidation(node, inv);
595    }
596
597    pub fn invalidate_with_source(
598        &mut self,
599        node: NodeId,
600        inv: Invalidation,
601        source: UiDebugInvalidationSource,
602    ) {
603        let detail = UiDebugInvalidationDetail::from_source(source);
604        self.mark_invalidation_with_detail(node, inv, source, detail);
605    }
606
607    pub fn invalidate_with_detail(
608        &mut self,
609        node: NodeId,
610        inv: Invalidation,
611        detail: UiDebugInvalidationDetail,
612    ) {
613        self.mark_invalidation_with_detail(node, inv, UiDebugInvalidationSource::Other, detail);
614    }
615
    /// Public entry point that lets the caller choose both the debug source
    /// and the debug detail; forwards directly to the internal walk.
    pub fn invalidate_with_source_and_detail(
        &mut self,
        node: NodeId,
        inv: Invalidation,
        source: UiDebugInvalidationSource,
        detail: UiDebugInvalidationDetail,
    ) {
        self.mark_invalidation_with_detail(node, inv, source, detail);
    }
625}