Skip to main content

ftui_layout/
incremental.rs

//! Incremental layout engine (bd-3p4y1.3).
//!
//! Wraps [`DepGraph`] and a per-node result cache so that only dirty
//! subtrees are re-evaluated during layout. Clean subtrees return their
//! cached `Vec<Rect>` in O(1).
//!
//! # Key Invariant
//!
//! The output of incremental layout is **bit-identical** to full layout.
//! The same traversal order (DFS pre-order) and the same computation at
//! each dirty node guarantee isomorphic results.
//!
//! # Usage
//!
//! ```ignore
//! use ftui_layout::incremental::IncrementalLayout;
//! use ftui_layout::dep_graph::InputKind;
//!
//! let mut inc = IncrementalLayout::new();
//!
//! // Build the widget tree.
//! let root = inc.add_node(None);
//! let left = inc.add_node(Some(root));
//! let right = inc.add_node(Some(root));
//!
//! // First pass: everything computes (cold cache).
//! inc.propagate();
//! let root_rects = inc.get_or_compute(root, area, |a| outer_flex.split(a));
//! let left_rects = inc.get_or_compute(left, root_rects[0], |a| left_flex.split(a));
//! let right_rects = inc.get_or_compute(right, root_rects[1], |a| right_flex.split(a));
//!
//! // Only left content changed — right subtree is skipped.
//! inc.mark_changed(left, InputKind::Content, new_hash);
//! inc.propagate();
//! let root_rects2 = inc.get_or_compute(root, area, |a| outer_flex.split(a));
//! let left_rects2 = inc.get_or_compute(left, root_rects2[0], |a| left_flex.split(a));
//! let right_rects2 = inc.get_or_compute(right, root_rects2[1], |a| right_flex.split(a));
//! // right_rects2 was returned from cache without calling right_flex.split().
//! ```
//!
//! # Flex Siblings
//!
//! In flex layouts, changing one child can affect all siblings (because
//! remaining space is redistributed). Use
//! [`mark_dirty_with_ancestors`](IncrementalLayout::mark_dirty_with_ancestors)
//! to dirty a child and all its ancestors. Since parent→child edges
//! already exist (from `add_node`), dirtying the parent automatically
//! propagates to all siblings during `propagate()`.
//!
//! # Force-Full Fallback
//!
//! Call [`IncrementalLayout::set_force_full`] to bypass the cache entirely
//! and recompute every node. This is useful for debugging or as an env-var
//! fallback (`FRANKENTUI_FULL_LAYOUT=1`, wired in bd-3p4y1.6).

use std::hash::{Hash, Hasher};

use ftui_core::geometry::Rect;
use rustc_hash::FxHashMap;

use crate::dep_graph::{CycleError, DepGraph, InputKind, NodeId};
60
61// ============================================================================
62// CachedNodeLayout
63// ============================================================================
64
/// Cached layout result for a single node.
///
/// One entry is stored per node in the engine's result cache. A cached
/// entry is only reusable when the node is clean *and* the requested
/// area equals `area`; otherwise the node is recomputed and this entry
/// is overwritten.
#[derive(Clone, Debug)]
struct CachedNodeLayout {
    /// The area that was used for this computation.
    area: Rect,
    /// The computed sub-regions (output of Flex::split / Grid row solve).
    rects: crate::Rects,
    /// FxHash of the output rects for change detection.
    /// NOTE(review): only the latest hash is kept — no previous value is
    /// retained for before/after comparison; see `result_changed`.
    result_hash: u64,
}
75
76/// Compute a fast hash of a `Vec<Rect>` for change detection.
77fn hash_rects(rects: &[Rect]) -> u64 {
78    let mut h = rustc_hash::FxHasher::default();
79    for r in rects {
80        r.x.hash(&mut h);
81        r.y.hash(&mut h);
82        r.width.hash(&mut h);
83        r.height.hash(&mut h);
84    }
85    h.finish()
86}
87
88// ============================================================================
89// IncrementalStats
90// ============================================================================
91
/// Statistics for a single incremental layout pass.
///
/// Counters are accumulated by `get_or_compute` and reset via
/// `reset_stats`; `cache_entries` is filled in when a snapshot is taken.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct IncrementalStats {
    /// Nodes recomputed in this pass.
    pub recomputed: usize,
    /// Nodes returned from cache.
    pub cached: usize,
    /// Total `get_or_compute` calls in this pass.
    pub total: usize,
    /// Current number of entries in the cache.
    pub cache_entries: usize,
}

impl IncrementalStats {
    /// Cache hit rate as a fraction (0.0 – 1.0).
    ///
    /// A pass with zero `get_or_compute` calls reports 0.0 rather than
    /// dividing by zero.
    pub fn hit_rate(&self) -> f64 {
        match self.total {
            0 => 0.0,
            t => self.cached as f64 / t as f64,
        }
    }
}
115
116// ============================================================================
117// IncrementalLayout
118// ============================================================================
119
/// Incremental layout engine.
///
/// Combines a [`DepGraph`] for dirty tracking with a per-node result
/// cache. The user drives the DFS tree walk; at each node,
/// [`get_or_compute`](Self::get_or_compute) decides whether to return
/// the cached result or call the supplied compute closure.
pub struct IncrementalLayout {
    /// Dependency graph for dirty propagation.
    graph: DepGraph,
    /// Per-node layout cache (keyed by `NodeId`, FxHash for speed).
    cache: FxHashMap<NodeId, CachedNodeLayout>,
    /// Current-pass statistics; reset via `reset_stats`.
    stats: IncrementalStats,
    /// When true, every `get_or_compute` call recomputes (bypass cache).
    force_full: bool,
}
136
impl IncrementalLayout {
    /// Create an empty incremental layout engine.
    #[must_use]
    pub fn new() -> Self {
        Self {
            graph: DepGraph::new(),
            cache: FxHashMap::default(),
            stats: IncrementalStats::default(),
            force_full: false,
        }
    }

    /// Create with pre-allocated capacity.
    ///
    /// `node_cap` sizes both the dependency graph (used for nodes and
    /// edges alike) and the result cache, avoiding rehashes while the
    /// widget tree is being built.
    #[must_use]
    pub fn with_capacity(node_cap: usize) -> Self {
        Self {
            graph: DepGraph::with_capacity(node_cap, node_cap),
            cache: FxHashMap::with_capacity_and_hasher(node_cap, Default::default()),
            stats: IncrementalStats::default(),
            force_full: false,
        }
    }

    /// Create from environment configuration.
    ///
    /// Reads `FRANKENTUI_FULL_LAYOUT`. When set to `"1"`, `"true"`, or
    /// `"yes"` (case-insensitive), force-full mode is enabled and all
    /// `get_or_compute` calls bypass the cache. An unset or unreadable
    /// variable defaults to incremental mode.
    #[must_use]
    pub fn from_env() -> Self {
        let force = std::env::var("FRANKENTUI_FULL_LAYOUT")
            .map(|v| matches!(v.to_ascii_lowercase().as_str(), "1" | "true" | "yes"))
            .unwrap_or(false);
        Self {
            graph: DepGraph::new(),
            cache: FxHashMap::default(),
            stats: IncrementalStats::default(),
            force_full: force,
        }
    }

    // ── Node Management ─────────────────────────────────────────────

    /// Add a layout node, optionally with a parent.
    ///
    /// When a parent is provided, a dependency edge is added so that
    /// dirtying the parent also dirties this child. The new node starts
    /// dirty because it has no cached result yet.
    pub fn add_node(&mut self, parent: Option<NodeId>) -> NodeId {
        let id = self.graph.add_node();
        if let Some(p) = parent {
            self.graph.set_parent(id, p);
            // Child depends on parent: parent dirty → child dirty.
            // The CycleError result is discarded — presumably a freshly
            // added node cannot close a cycle; TODO confirm `set_parent`
            // adds no edges of its own.
            let _ = self.graph.add_edge(id, p);
        }
        // New nodes start dirty (no cached result).
        self.graph.mark_dirty(id);
        id
    }

    /// Remove a node and evict its cache entry.
    ///
    /// The parent chain is dirtied *before* the node is removed so the
    /// parent link is still available for the ancestor walk.
    pub fn remove_node(&mut self, id: NodeId) {
        self.cache.remove(&id);
        // If there's a parent, mark it and all ancestors dirty (layout changed),
        // because removing a node affects intrinsic sizes (FitContent) all the way up.
        if let Some(parent) = self.graph.parent(id) {
            self.mark_dirty_with_ancestors(parent);
        }
        self.graph.remove_node(id);
    }

    /// Add a custom dependency edge: `from` depends on `to`.
    ///
    /// Use this for bidirectional flex-sibling relationships or
    /// cross-subtree dependencies.
    ///
    /// # Errors
    ///
    /// Returns [`CycleError`] when the edge would make the graph cyclic.
    pub fn add_dependency(&mut self, from: NodeId, to: NodeId) -> Result<(), CycleError> {
        self.graph.add_edge(from, to)
    }

    // ── Dirty Tracking ──────────────────────────────────────────────

    /// Mark a node's input as changed (hash-deduplicated).
    ///
    /// The node will not be dirtied if `new_hash` matches the stored
    /// hash for this `InputKind`.
    pub fn mark_changed(&mut self, id: NodeId, kind: InputKind, new_hash: u64) {
        self.graph.mark_changed(id, kind, new_hash);
    }

    /// Force-mark a node as dirty without hash comparison.
    pub fn mark_dirty(&mut self, id: NodeId) {
        self.graph.mark_dirty(id);
    }

    /// Mark a node dirty and also dirty all its ancestors up to the
    /// root. This implements the "flex sibling" pattern: when a child
    /// changes, its parent must recompute, which propagates to all
    /// siblings via parent→child edges.
    pub fn mark_dirty_with_ancestors(&mut self, id: NodeId) {
        self.graph.mark_dirty(id);
        // Walk the parent chain; terminates at the root (no parent).
        let mut current = id;
        while let Some(parent) = self.graph.parent(current) {
            self.graph.mark_dirty(parent);
            current = parent;
        }
    }

    /// Propagate dirtiness from pending nodes to all transitive
    /// dependents. Must be called before [`get_or_compute`](Self::get_or_compute).
    ///
    /// Returns the dirty set in DFS pre-order.
    pub fn propagate(&mut self) -> Vec<NodeId> {
        self.graph.propagate()
    }

    /// Check if a node is currently dirty.
    #[must_use]
    pub fn is_dirty(&self, id: NodeId) -> bool {
        self.graph.is_dirty(id)
    }

    // ── Layout Computation ──────────────────────────────────────────

    /// Get the cached layout for a node, or compute and cache it.
    ///
    /// A cache hit requires:
    /// 1. The node is not dirty.
    /// 2. The area matches the previously cached area.
    /// 3. `force_full` is not enabled.
    ///
    /// On a cache miss, `compute` is called with the `area` and the
    /// result is stored. The node is cleaned after computation.
    /// Statistics (`total`, `cached`, `recomputed`) are updated on
    /// every call.
    pub fn get_or_compute<F>(&mut self, id: NodeId, area: Rect, compute: F) -> crate::Rects
    where
        F: FnOnce(Rect) -> crate::Rects,
    {
        self.stats.total += 1;

        // Let-chain: all three hit conditions checked in one guard.
        if !self.force_full
            && !self.graph.is_dirty(id)
            && let Some(cached) = self.cache.get(&id)
            && cached.area == area
        {
            self.stats.cached += 1;
            return cached.rects.clone();
        }

        // Cache miss or dirty: recompute.
        let rects = compute(area);
        let result_hash = hash_rects(&rects);
        self.cache.insert(
            id,
            CachedNodeLayout {
                area,
                rects: rects.clone(),
                result_hash,
            },
        );
        self.graph.clean(id);
        self.stats.recomputed += 1;
        rects
    }

    /// Retrieve the cached result for a node without recomputing.
    ///
    /// Returns `None` if the node has no cached result.
    #[must_use]
    pub fn cached_rects(&self, id: NodeId) -> Option<&[Rect]> {
        self.cache.get(&id).map(|c| c.rects.as_slice())
    }

    /// Check whether a node's last computed result changed compared to
    /// its previous result. Useful for detecting when a parent must
    /// propagate layout changes to children.
    ///
    /// NOTE(review): the current implementation only reports "new" —
    /// it returns `true` iff there is no cached entry and never compares
    /// `result_hash` against a prior value, so a recomputation that
    /// produced different rects is NOT detected. Callers needing real
    /// change detection should snapshot [`result_hash`](Self::result_hash)
    /// before recomputing and compare afterwards.
    #[must_use]
    pub fn result_changed(&self, id: NodeId) -> bool {
        // If there's no cached entry, it's "changed" (new).
        !self.cache.contains_key(&id)
    }

    /// Get the hash of a node's last computed result.
    ///
    /// Returns `None` if the node has never been computed (no cache entry).
    #[must_use]
    pub fn result_hash(&self, id: NodeId) -> Option<u64> {
        self.cache.get(&id).map(|c| c.result_hash)
    }

    // ── Configuration ───────────────────────────────────────────────

    /// Enable or disable force-full mode.
    ///
    /// When enabled, every `get_or_compute` call recomputes from
    /// scratch, ignoring the cache. Useful for debugging or as a
    /// fallback path.
    pub fn set_force_full(&mut self, force: bool) {
        self.force_full = force;
    }

    /// Whether force-full mode is enabled.
    #[must_use]
    pub fn force_full(&self) -> bool {
        self.force_full
    }

    // ── Statistics ──────────────────────────────────────────────────

    /// Current-pass statistics.
    ///
    /// `cache_entries` is sampled at call time; the counters reflect
    /// activity since the last [`reset_stats`](Self::reset_stats).
    #[must_use]
    pub fn stats(&self) -> IncrementalStats {
        IncrementalStats {
            cache_entries: self.cache.len(),
            ..self.stats.clone()
        }
    }

    /// Reset per-pass statistics to zero.
    pub fn reset_stats(&mut self) {
        self.stats = IncrementalStats::default();
    }

    // ── Bulk Operations ─────────────────────────────────────────────

    /// Mark all nodes clean and advance the generation.
    pub fn clean_all(&mut self) {
        self.graph.clean_all();
    }

    /// Mark all nodes dirty (forces full recomputation on next pass).
    pub fn invalidate_all(&mut self) {
        self.graph.invalidate_all();
    }

    /// Clear the entire result cache (frees memory).
    ///
    /// Does not touch dirty flags; subsequent `get_or_compute` calls
    /// will still recompute because the area/cache lookup misses.
    pub fn clear_cache(&mut self) {
        self.cache.clear();
    }

    // ── Introspection ───────────────────────────────────────────────

    /// Borrow the underlying dependency graph.
    #[must_use]
    pub fn graph(&self) -> &DepGraph {
        &self.graph
    }

    /// Mutable access to the dependency graph.
    pub fn graph_mut(&mut self) -> &mut DepGraph {
        &mut self.graph
    }

    /// Number of cached entries.
    #[must_use]
    pub fn cache_len(&self) -> usize {
        self.cache.len()
    }

    /// Number of live nodes in the dependency graph.
    #[must_use]
    pub fn node_count(&self) -> usize {
        self.graph.node_count()
    }
}
397
398impl Default for IncrementalLayout {
399    fn default() -> Self {
400        Self::new()
401    }
402}
403
404impl std::fmt::Debug for IncrementalLayout {
405    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
406        f.debug_struct("IncrementalLayout")
407            .field("nodes", &self.graph.node_count())
408            .field("cache_entries", &self.cache.len())
409            .field("force_full", &self.force_full)
410            .finish()
411    }
412}
413
414// ============================================================================
415// Tests
416// ============================================================================
417
418#[cfg(test)]
419mod tests {
420    use super::*;
421
422    /// Helper: build a simple binary tree of depth `d`.
423    /// Returns (inc, root, leaves).
424    fn binary_tree(depth: u32) -> (IncrementalLayout, NodeId, Vec<NodeId>) {
425        let node_count = (1u32 << (depth + 1)) - 1;
426        let mut inc = IncrementalLayout::with_capacity(node_count as usize);
427        let root = inc.add_node(None);
428
429        let mut current_level = vec![root];
430        let mut leaves = Vec::new();
431
432        for _ in 0..depth {
433            let mut next_level = Vec::new();
434            for &parent in &current_level {
435                let left = inc.add_node(Some(parent));
436                let right = inc.add_node(Some(parent));
437                next_level.push(left);
438                next_level.push(right);
439            }
440            current_level = next_level;
441        }
442        leaves.extend(current_level);
443
444        (inc, root, leaves)
445    }
446
447    /// Helper: build a flat tree (root + N children).
448    fn flat_tree(n: usize) -> (IncrementalLayout, NodeId, Vec<NodeId>) {
449        let mut inc = IncrementalLayout::with_capacity(n + 1);
450        let root = inc.add_node(None);
451        let children: Vec<_> = (0..n).map(|_| inc.add_node(Some(root))).collect();
452        (inc, root, children)
453    }
454
455    fn area(w: u16, h: u16) -> Rect {
456        Rect::new(0, 0, w, h)
457    }
458
459    fn split_equal(a: Rect, n: usize) -> crate::Rects {
460        if n == 0 {
461            return crate::Rects::new();
462        }
463        let w = a.width / n as u16;
464        (0..n)
465            .map(|i| Rect::new(a.x + (i as u16) * w, a.y, w, a.height))
466            .collect()
467    }
468
469    // ── Basic ───────────────────────────────────────────────────────
470
471    #[test]
472    fn new_node_is_dirty() {
473        let mut inc = IncrementalLayout::new();
474        let n = inc.add_node(None);
475        assert!(inc.is_dirty(n));
476    }
477
478    #[test]
479    fn get_or_compute_caches() {
480        let mut inc = IncrementalLayout::new();
481        let n = inc.add_node(None);
482        inc.propagate();
483
484        let a = area(80, 24);
485        let mut calls = 0u32;
486        let r1 = inc.get_or_compute(n, a, |a| {
487            calls += 1;
488            split_equal(a, 2)
489        });
490
491        // Second call should be cached.
492        let r2 = inc.get_or_compute(n, a, |a| {
493            calls += 1;
494            split_equal(a, 2)
495        });
496
497        assert_eq!(r1, r2);
498        assert_eq!(calls, 1);
499    }
500
501    #[test]
502    fn dirty_node_recomputes() {
503        let mut inc = IncrementalLayout::new();
504        let n = inc.add_node(None);
505        inc.propagate();
506
507        let a = area(80, 24);
508        inc.get_or_compute(n, a, |a| split_equal(a, 2));
509
510        // Mark dirty → recompute.
511        inc.mark_dirty(n);
512        inc.propagate();
513
514        let mut calls = 0u32;
515        inc.get_or_compute(n, a, |a| {
516            calls += 1;
517            split_equal(a, 2)
518        });
519        assert_eq!(calls, 1);
520    }
521
522    #[test]
523    fn area_change_triggers_recompute() {
524        let mut inc = IncrementalLayout::new();
525        let n = inc.add_node(None);
526        inc.propagate();
527
528        inc.get_or_compute(n, area(80, 24), |a| split_equal(a, 2));
529
530        // Different area → must recompute even though node is clean.
531        let mut calls = 0u32;
532        inc.get_or_compute(n, area(120, 40), |a| {
533            calls += 1;
534            split_equal(a, 2)
535        });
536        assert_eq!(calls, 1);
537    }
538
539    #[test]
540    fn same_area_same_node_cached() {
541        let mut inc = IncrementalLayout::new();
542        let n = inc.add_node(None);
543        inc.propagate();
544
545        let a = area(80, 24);
546        inc.get_or_compute(n, a, |a| split_equal(a, 2));
547
548        // Same area, clean node → cache hit.
549        let mut calls = 0u32;
550        inc.get_or_compute(n, a, |_a| {
551            calls += 1;
552            crate::Rects::new()
553        });
554        assert_eq!(calls, 0);
555    }
556
557    // ── Propagation ─────────────────────────────────────────────────
558
559    #[test]
560    fn dirty_parent_dirties_child() {
561        let mut inc = IncrementalLayout::new();
562        let root = inc.add_node(None);
563        let child = inc.add_node(Some(root));
564        inc.propagate();
565
566        let a = area(80, 24);
567        inc.get_or_compute(root, a, |a| split_equal(a, 1));
568        inc.get_or_compute(child, a, |a| split_equal(a, 2));
569
570        // Dirty root → child should also recompute.
571        inc.mark_dirty(root);
572        inc.propagate();
573
574        assert!(inc.is_dirty(root));
575        assert!(inc.is_dirty(child));
576    }
577
578    #[test]
579    fn clean_sibling_not_affected_by_dirty_sibling() {
580        let (mut inc, root, children) = flat_tree(3);
581        inc.propagate();
582
583        let a = area(120, 24);
584        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
585        for (i, &c) in children.iter().enumerate() {
586            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
587        }
588
589        // Mark only child[1] dirty.
590        inc.mark_dirty(children[1]);
591        inc.propagate();
592
593        assert!(!inc.is_dirty(root));
594        assert!(!inc.is_dirty(children[0]));
595        assert!(inc.is_dirty(children[1]));
596        assert!(!inc.is_dirty(children[2]));
597    }
598
599    #[test]
600    fn flex_siblings_dirty_via_parent() {
601        let (mut inc, root, children) = flat_tree(3);
602        inc.propagate();
603
604        let a = area(120, 24);
605        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
606        for (i, &c) in children.iter().enumerate() {
607            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
608        }
609
610        // Flex coupling: when a child changes, mark the parent dirty.
611        // Since parent→child edges exist, parent dirty → all children dirty.
612        inc.mark_dirty(children[1]);
613        inc.mark_dirty(root); // Flex coupling: parent must recompute.
614        inc.propagate();
615
616        assert!(inc.is_dirty(root));
617        assert!(inc.is_dirty(children[0]));
618        assert!(inc.is_dirty(children[1]));
619        assert!(inc.is_dirty(children[2]));
620    }
621
622    // ── Statistics ──────────────────────────────────────────────────
623
624    #[test]
625    fn stats_track_hits_and_misses() {
626        let (mut inc, root, children) = flat_tree(3);
627        inc.propagate();
628
629        let a = area(120, 24);
630        // First pass: all misses.
631        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
632        for (i, &c) in children.iter().enumerate() {
633            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
634        }
635
636        let s = inc.stats();
637        assert_eq!(s.recomputed, 4);
638        assert_eq!(s.cached, 0);
639        assert_eq!(s.total, 4);
640
641        inc.reset_stats();
642
643        // Second pass: all hits.
644        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
645        for (i, &c) in children.iter().enumerate() {
646            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
647        }
648
649        let s = inc.stats();
650        assert_eq!(s.recomputed, 0);
651        assert_eq!(s.cached, 4);
652        assert_eq!(s.total, 4);
653        assert!((s.hit_rate() - 1.0).abs() < 0.001);
654    }
655
656    #[test]
657    fn stats_partial_dirty() {
658        let (mut inc, root, children) = flat_tree(4);
659        inc.propagate();
660
661        let a = area(160, 24);
662        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 4));
663        for (i, &c) in children.iter().enumerate() {
664            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
665        }
666        inc.reset_stats();
667
668        // Dirty one child.
669        inc.mark_dirty(children[2]);
670        inc.propagate();
671
672        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 4));
673        for (i, &c) in children.iter().enumerate() {
674            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
675        }
676
677        let s = inc.stats();
678        assert_eq!(s.recomputed, 1);
679        assert_eq!(s.cached, 4); // root + 3 clean children
680        assert_eq!(s.total, 5);
681    }
682
683    // ── Force Full ──────────────────────────────────────────────────
684
685    #[test]
686    fn force_full_bypasses_cache() {
687        let mut inc = IncrementalLayout::new();
688        let n = inc.add_node(None);
689        inc.propagate();
690
691        let a = area(80, 24);
692        inc.get_or_compute(n, a, |a| split_equal(a, 2));
693
694        // Enable force-full.
695        inc.set_force_full(true);
696        assert!(inc.force_full());
697
698        let mut calls = 0u32;
699        inc.get_or_compute(n, a, |a| {
700            calls += 1;
701            split_equal(a, 2)
702        });
703        assert_eq!(calls, 1);
704    }
705
706    #[test]
707    fn force_full_produces_identical_results() {
708        let (mut inc, root, children) = flat_tree(3);
709        inc.propagate();
710
711        let a = area(120, 24);
712
713        // Incremental pass.
714        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
715        let mut child_rects_inc = Vec::new();
716        for (i, &c) in children.iter().enumerate() {
717            child_rects_inc.push(inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 2)));
718        }
719
720        // Force-full pass.
721        inc.set_force_full(true);
722        inc.reset_stats();
723
724        let root_rects_full = inc.get_or_compute(root, a, |a| split_equal(a, 3));
725        let mut child_rects_full = Vec::new();
726        for (i, &c) in children.iter().enumerate() {
727            child_rects_full.push(inc.get_or_compute(c, root_rects_full[i], |a| split_equal(a, 2)));
728        }
729
730        assert_eq!(root_rects, root_rects_full);
731        assert_eq!(child_rects_inc, child_rects_full);
732    }
733
734    // ── Node Removal ────────────────────────────────────────────────
735
736    #[test]
737    fn remove_node_evicts_cache() {
738        let mut inc = IncrementalLayout::new();
739        let root = inc.add_node(None);
740        let child = inc.add_node(Some(root));
741        inc.propagate();
742
743        inc.get_or_compute(child, area(40, 24), |a| split_equal(a, 1));
744        assert!(inc.cached_rects(child).is_some());
745
746        inc.remove_node(child);
747        assert!(inc.cached_rects(child).is_none());
748    }
749
750    #[test]
751    fn remove_node_dirties_parent() {
752        let mut inc = IncrementalLayout::new();
753        let root = inc.add_node(None);
754        let child = inc.add_node(Some(root));
755        inc.propagate();
756
757        let a = area(80, 24);
758        inc.get_or_compute(root, a, |a| split_equal(a, 1));
759        inc.get_or_compute(child, a, |a| split_equal(a, 1));
760
761        // Remove child → root should be dirty.
762        inc.remove_node(child);
763        assert!(inc.is_dirty(root));
764    }
765
766    // ── Bulk Operations ─────────────────────────────────────────────
767
768    #[test]
769    fn invalidate_all_forces_recompute() {
770        let (mut inc, root, children) = flat_tree(3);
771        inc.propagate();
772
773        let a = area(120, 24);
774        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
775        for (i, &c) in children.iter().enumerate() {
776            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
777        }
778
779        inc.invalidate_all();
780        inc.propagate();
781
782        assert!(inc.is_dirty(root));
783        for &c in &children {
784            assert!(inc.is_dirty(c));
785        }
786    }
787
788    #[test]
789    fn clean_all_resets_dirty() {
790        let (mut inc, root, children) = flat_tree(2);
791        inc.propagate();
792
793        // All should be clean after propagate + compute.
794        let a = area(80, 24);
795        inc.get_or_compute(root, a, |a| split_equal(a, 2));
796        for (i, &c) in children.iter().enumerate() {
797            let child_area = Rect::new(i as u16 * 40, 0, 40, 24);
798            inc.get_or_compute(c, child_area, |a| split_equal(a, 1));
799        }
800
801        inc.mark_dirty(root);
802        inc.propagate();
803        assert!(inc.is_dirty(root));
804
805        inc.clean_all();
806        assert!(!inc.is_dirty(root));
807    }
808
809    #[test]
810    fn clear_cache_frees_memory() {
811        let (mut inc, root, _children) = flat_tree(5);
812        inc.propagate();
813
814        let a = area(200, 24);
815        inc.get_or_compute(root, a, |a| split_equal(a, 5));
816        assert!(inc.cache_len() > 0);
817
818        inc.clear_cache();
819        assert_eq!(inc.cache_len(), 0);
820    }
821
822    // ── mark_dirty_with_ancestors ─────────────────────────────────
823
824    #[test]
825    fn mark_dirty_with_ancestors_propagates_to_siblings() {
826        // Root → 3 children. Dirtying child[1] with ancestors
827        // should dirty root, which propagates to all children.
828        let (mut inc, root, children) = flat_tree(3);
829        inc.propagate();
830
831        let a = area(120, 24);
832        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
833        for (i, &c) in children.iter().enumerate() {
834            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
835        }
836
837        inc.mark_dirty_with_ancestors(children[1]);
838        inc.propagate();
839
840        assert!(inc.is_dirty(root));
841        assert!(inc.is_dirty(children[0]));
842        assert!(inc.is_dirty(children[1]));
843        assert!(inc.is_dirty(children[2]));
844    }
845
846    #[test]
847    fn mark_dirty_with_ancestors_deep_chain() {
848        // Chain: root → A → B → C. Dirty C with ancestors → all dirty.
849        let mut inc = IncrementalLayout::new();
850        let root = inc.add_node(None);
851        let a = inc.add_node(Some(root));
852        let b = inc.add_node(Some(a));
853        let c = inc.add_node(Some(b));
854        inc.propagate();
855
856        let area_ = area(80, 24);
857        inc.get_or_compute(root, area_, |a| split_equal(a, 1));
858        inc.get_or_compute(a, area_, |a| split_equal(a, 1));
859        inc.get_or_compute(b, area_, |a| split_equal(a, 1));
860        inc.get_or_compute(c, area_, |a| split_equal(a, 1));
861
862        inc.mark_dirty_with_ancestors(c);
863        inc.propagate();
864
865        assert!(inc.is_dirty(root));
866        assert!(inc.is_dirty(a));
867        assert!(inc.is_dirty(b));
868        assert!(inc.is_dirty(c));
869    }
870
871    // ── Mark Changed Deduplication ──────────────────────────────────
872
873    #[test]
874    fn mark_changed_deduplicates() {
875        let mut inc = IncrementalLayout::new();
876        let n = inc.add_node(None);
877        inc.propagate();
878
879        let a = area(80, 24);
880        inc.get_or_compute(n, a, |a| split_equal(a, 2));
881
882        // First change.
883        inc.mark_changed(n, InputKind::Constraint, 42);
884        inc.propagate();
885        assert!(inc.is_dirty(n));
886
887        inc.get_or_compute(n, a, |a| split_equal(a, 2));
888
889        // Same hash → no-op.
890        inc.mark_changed(n, InputKind::Constraint, 42);
891        inc.propagate();
892        assert!(!inc.is_dirty(n));
893    }
894
895    // ── Deep Tree ───────────────────────────────────────────────────
896
897    #[test]
898    fn deep_tree_partial_dirty() {
899        let (mut inc, root, leaves) = binary_tree(4);
900        inc.propagate();
901
902        // Full pass.
903        fn walk(inc: &mut IncrementalLayout, id: NodeId, a: Rect) {
904            let rects = inc.get_or_compute(id, a, |a| split_equal(a, 2));
905            // Walk children (dependents whose parent is `id`).
906            let deps: Vec<_> = inc.graph().dependents(id).to_vec();
907            for (i, child) in deps.iter().enumerate() {
908                if i < rects.len() {
909                    walk(inc, *child, rects[i]);
910                }
911            }
912        }
913
914        walk(&mut inc, root, area(160, 24));
915        inc.reset_stats();
916
917        // Dirty one leaf → only that leaf recomputes.
918        inc.mark_dirty(leaves[7]);
919        inc.propagate();
920
921        walk(&mut inc, root, area(160, 24));
922
923        let s = inc.stats();
924        assert_eq!(s.recomputed, 1);
925        assert!(s.cached > 0);
926    }
927
928    // ── Bit-Identical ───────────────────────────────────────────────
929
930    #[test]
931    fn incremental_equals_full_layout() {
932        let a = area(200, 60);
933
934        // Build tree: root → 5 children, each with 3 grandchildren.
935        let mut inc = IncrementalLayout::new();
936        let root = inc.add_node(None);
937        let mut children = Vec::new();
938        let mut grandchildren = Vec::new();
939        for _ in 0..5 {
940            let child = inc.add_node(Some(root));
941            children.push(child);
942            for _ in 0..3 {
943                let gc = inc.add_node(Some(child));
944                grandchildren.push(gc);
945            }
946        }
947        inc.propagate();
948
949        // Define a deterministic compute function.
950        let compute = |a: Rect, n: usize| -> crate::Rects { split_equal(a, n) };
951
952        // Incremental pass.
953        let root_rects = inc.get_or_compute(root, a, |a| compute(a, 5));
954        let mut child_rects = Vec::new();
955        let mut gc_rects = Vec::new();
956        for (i, &c) in children.iter().enumerate() {
957            let cr = inc.get_or_compute(c, root_rects[i], |a| compute(a, 3));
958            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
959            for (j, &gc) in deps.iter().enumerate() {
960                if j < cr.len() {
961                    gc_rects.push(inc.get_or_compute(gc, cr[j], |a| compute(a, 1)));
962                }
963            }
964            child_rects.push(cr);
965        }
966
967        // Force-full pass.
968        inc.set_force_full(true);
969        inc.reset_stats();
970
971        let root_rects2 = inc.get_or_compute(root, a, |a| compute(a, 5));
972        let mut child_rects2 = Vec::new();
973        let mut gc_rects2 = Vec::new();
974        for (i, &c) in children.iter().enumerate() {
975            let cr = inc.get_or_compute(c, root_rects2[i], |a| compute(a, 3));
976            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
977            for (j, &gc) in deps.iter().enumerate() {
978                if j < cr.len() {
979                    gc_rects2.push(inc.get_or_compute(gc, cr[j], |a| compute(a, 1)));
980                }
981            }
982            child_rects2.push(cr);
983        }
984
985        assert_eq!(root_rects, root_rects2);
986        assert_eq!(child_rects, child_rects2);
987        assert_eq!(gc_rects, gc_rects2);
988    }
989
990    // ── Edge Cases ──────────────────────────────────────────────────
991
992    #[test]
993    fn empty_graph() {
994        let inc = IncrementalLayout::new();
995        assert_eq!(inc.node_count(), 0);
996        assert_eq!(inc.cache_len(), 0);
997    }
998
999    #[test]
1000    fn single_node_graph() {
1001        let mut inc = IncrementalLayout::new();
1002        let n = inc.add_node(None);
1003        inc.propagate();
1004
1005        let r = inc.get_or_compute(n, area(80, 24), |_a| crate::Rects::new());
1006        assert!(r.is_empty());
1007
1008        let s = inc.stats();
1009        assert_eq!(s.total, 1);
1010        assert_eq!(s.recomputed, 1);
1011    }
1012
1013    #[test]
1014    fn zero_area_still_caches() {
1015        let mut inc = IncrementalLayout::new();
1016        let n = inc.add_node(None);
1017        inc.propagate();
1018
1019        let a = Rect::default(); // 0×0
1020        inc.get_or_compute(n, a, |_| crate::Rects::new());
1021
1022        let mut calls = 0u32;
1023        inc.get_or_compute(n, a, |_| {
1024            calls += 1;
1025            crate::Rects::new()
1026        });
1027        assert_eq!(calls, 0);
1028    }
1029
1030    #[test]
1031    fn result_hash_consistent() {
1032        let mut inc = IncrementalLayout::new();
1033        let n = inc.add_node(None);
1034        inc.propagate();
1035
1036        let a = area(80, 24);
1037        inc.get_or_compute(n, a, |a| split_equal(a, 2));
1038
1039        let h1 = inc.result_hash(n).unwrap();
1040
1041        // Recompute with same inputs → same hash.
1042        inc.mark_dirty(n);
1043        inc.propagate();
1044        inc.get_or_compute(n, a, |a| split_equal(a, 2));
1045
1046        let h2 = inc.result_hash(n).unwrap();
1047        assert_eq!(h1, h2);
1048    }
1049
1050    #[test]
1051    fn debug_format() {
1052        let inc = IncrementalLayout::new();
1053        let dbg = format!("{:?}", inc);
1054        assert!(dbg.contains("IncrementalLayout"));
1055        assert!(dbg.contains("nodes"));
1056    }
1057
1058    #[test]
1059    fn default_impl() {
1060        let inc = IncrementalLayout::default();
1061        assert_eq!(inc.node_count(), 0);
1062        assert!(!inc.force_full());
1063    }
1064
1065    #[test]
1066    fn from_env_default_is_not_force_full() {
1067        // We can't safely set_var in forbid(unsafe_code) crate, so test
1068        // that the default (unset) path doesn't enable force_full.
1069        // The env var may or may not be set in CI; the key assertion is
1070        // that `from_env()` doesn't panic and returns a usable instance.
1071        let inc = IncrementalLayout::from_env();
1072        // If FRANKENTUI_FULL_LAYOUT is not set (common case), force_full is false.
1073        // If it IS set (unlikely in tests), force_full matches the value.
1074        assert_eq!(inc.node_count(), 0);
1075    }
1076
1077    #[test]
1078    fn parse_env_values() {
1079        // Test the parsing logic directly without touching env vars.
1080        let parse =
1081            |s: &str| -> bool { matches!(s.to_ascii_lowercase().as_str(), "1" | "true" | "yes") };
1082        assert!(parse("1"));
1083        assert!(parse("true"));
1084        assert!(parse("TRUE"));
1085        assert!(parse("yes"));
1086        assert!(parse("YES"));
1087        assert!(!parse("0"));
1088        assert!(!parse("false"));
1089        assert!(!parse("no"));
1090        assert!(!parse(""));
1091    }
1092
1093    // ── Large-Scale ─────────────────────────────────────────────────
1094
1095    #[test]
1096    fn thousand_node_tree_partial_dirty() {
1097        // Root → 10 children → 10 grandchildren each (= 111 nodes).
1098        let mut inc = IncrementalLayout::with_capacity(111);
1099        let root = inc.add_node(None);
1100        let mut children = Vec::new();
1101        let mut grandchildren = Vec::new();
1102
1103        for _ in 0..10 {
1104            let child = inc.add_node(Some(root));
1105            children.push(child);
1106            for _ in 0..10 {
1107                let gc = inc.add_node(Some(child));
1108                grandchildren.push(gc);
1109            }
1110        }
1111        inc.propagate();
1112
1113        let a = area(200, 60);
1114
1115        // Full first pass.
1116        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 10));
1117        for (i, &c) in children.iter().enumerate() {
1118            let cr = inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 10));
1119            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
1120            for (j, &gc) in deps.iter().enumerate() {
1121                if j < cr.len() {
1122                    inc.get_or_compute(gc, cr[j], |a| split_equal(a, 1));
1123                }
1124            }
1125        }
1126
1127        let s = inc.stats();
1128        assert_eq!(s.recomputed, 111);
1129        assert_eq!(s.cached, 0);
1130        inc.reset_stats();
1131
1132        // Dirty only 2 grandchildren (< 2% of 111 nodes).
1133        inc.mark_dirty(grandchildren[17]);
1134        inc.mark_dirty(grandchildren[83]);
1135        inc.propagate();
1136
1137        // Second pass.
1138        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 10));
1139        for (i, &c) in children.iter().enumerate() {
1140            let cr = inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 10));
1141            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
1142            for (j, &gc) in deps.iter().enumerate() {
1143                if j < cr.len() {
1144                    inc.get_or_compute(gc, cr[j], |a| split_equal(a, 1));
1145                }
1146            }
1147        }
1148
1149        let s = inc.stats();
1150        assert_eq!(s.recomputed, 2);
1151        assert_eq!(s.cached, 109);
1152        assert!(s.hit_rate() > 0.95);
1153    }
1154}