Skip to main content

ftui_layout/
incremental.rs

1//! Incremental layout engine (bd-3p4y1.3).
2//!
3//! Wraps [`DepGraph`] and a per-node result cache so that only dirty
4//! subtrees are re-evaluated during layout. Clean subtrees return their
5//! cached `Vec<Rect>` in O(1).
6//!
7//! # Key Invariant
8//!
9//! The output of incremental layout is **bit-identical** to full layout.
10//! The same traversal order (DFS pre-order) and the same computation at
11//! each dirty node guarantee isomorphic results.
12//!
13//! # Usage
14//!
15//! ```ignore
16//! use ftui_layout::incremental::IncrementalLayout;
17//! use ftui_layout::dep_graph::InputKind;
18//!
19//! let mut inc = IncrementalLayout::new();
20//!
21//! // Build the widget tree.
22//! let root = inc.add_node(None);
23//! let left = inc.add_node(Some(root));
24//! let right = inc.add_node(Some(root));
25//!
26//! // First pass: everything computes (cold cache).
27//! inc.propagate();
28//! let root_rects = inc.get_or_compute(root, area, |a| outer_flex.split(a));
29//! let left_rects = inc.get_or_compute(left, root_rects[0], |a| left_flex.split(a));
30//! let right_rects = inc.get_or_compute(right, root_rects[1], |a| right_flex.split(a));
31//!
32//! // Only left content changed — right subtree is skipped.
33//! inc.mark_changed(left, InputKind::Content, new_hash);
34//! inc.propagate();
35//! let root_rects2 = inc.get_or_compute(root, area, |a| outer_flex.split(a));
36//! let left_rects2 = inc.get_or_compute(left, root_rects2[0], |a| left_flex.split(a));
37//! let right_rects2 = inc.get_or_compute(right, root_rects2[1], |a| right_flex.split(a));
38//! // right_rects2 was returned from cache without calling right_flex.split().
39//! ```
40//!
41//! # Flex Siblings
42//!
43//! In flex layouts, changing one child can affect all siblings (because
44//! remaining space is redistributed). Use
45//! [`mark_dirty_with_ancestors`](IncrementalLayout::mark_dirty_with_ancestors)
46//! to dirty a child and all its ancestors. Since parent→child edges
47//! already exist (from `add_node`), dirtying the parent automatically
48//! propagates to all siblings during `propagate()`.
49//!
50//! # Force-Full Fallback
51//!
52//! Call [`IncrementalLayout::set_force_full`] to bypass the cache entirely
53//! and recompute every node. This is useful for debugging or as an env-var
54//! fallback (`FRANKENTUI_FULL_LAYOUT=1`, wired in bd-3p4y1.6).
55
56use crate::dep_graph::{CycleError, DepGraph, InputKind, NodeId};
57use ftui_core::geometry::Rect;
58use rustc_hash::FxHashMap;
59use std::hash::{Hash, Hasher};
60
61// ============================================================================
62// CachedNodeLayout
63// ============================================================================
64
/// Cached layout result for a single node.
///
/// Stored per [`NodeId`] in [`IncrementalLayout`]; an entry is reused
/// only while the node stays clean AND the queried area equals `area`.
#[derive(Clone, Debug)]
struct CachedNodeLayout {
    /// The area that was used for this computation.
    /// A lookup with any other area is treated as a cache miss.
    area: Rect,
    /// The computed sub-regions (output of Flex::split / Grid row solve).
    rects: Vec<Rect>,
    /// FxHash of the output rects for change detection.
    /// NOTE(review): only the latest hash is kept — there is no record
    /// of the *previous* pass's hash (see `result_changed`).
    result_hash: u64,
}
75
76/// Compute a fast hash of a `Vec<Rect>` for change detection.
77fn hash_rects(rects: &[Rect]) -> u64 {
78    let mut h = rustc_hash::FxHasher::default();
79    for r in rects {
80        r.x.hash(&mut h);
81        r.y.hash(&mut h);
82        r.width.hash(&mut h);
83        r.height.hash(&mut h);
84    }
85    h.finish()
86}
87
88// ============================================================================
89// IncrementalStats
90// ============================================================================
91
/// Statistics for a single incremental layout pass.
///
/// Counters accumulate across `get_or_compute` calls until
/// [`IncrementalLayout::reset_stats`] zeroes them; `cache_entries` is a
/// snapshot filled in by [`IncrementalLayout::stats`] at call time.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct IncrementalStats {
    /// Nodes recomputed in this pass (cache misses).
    pub recomputed: usize,
    /// Nodes returned from cache (cache hits).
    pub cached: usize,
    /// Total `get_or_compute` calls in this pass (hits + misses).
    pub total: usize,
    /// Current number of entries in the cache.
    pub cache_entries: usize,
}
104
105impl IncrementalStats {
106    /// Cache hit rate as a fraction (0.0 – 1.0).
107    pub fn hit_rate(&self) -> f64 {
108        if self.total == 0 {
109            0.0
110        } else {
111            self.cached as f64 / self.total as f64
112        }
113    }
114}
115
116// ============================================================================
117// IncrementalLayout
118// ============================================================================
119
/// Incremental layout engine.
///
/// Combines a [`DepGraph`] for dirty tracking with a per-node result
/// cache. The user drives the DFS tree walk; at each node,
/// [`get_or_compute`](Self::get_or_compute) decides whether to return
/// the cached result or call the supplied compute closure.
pub struct IncrementalLayout {
    /// Dependency graph for dirty propagation.
    graph: DepGraph,
    /// Per-node layout cache. Entries are evicted on `remove_node`
    /// and wholesale via `clear_cache`.
    cache: FxHashMap<NodeId, CachedNodeLayout>,
    /// Current-pass statistics (reset via `reset_stats`).
    stats: IncrementalStats,
    /// When true, every `get_or_compute` call recomputes (bypass cache).
    force_full: bool,
}
136
impl IncrementalLayout {
    /// Create an empty incremental layout engine.
    #[must_use]
    pub fn new() -> Self {
        Self {
            graph: DepGraph::new(),
            cache: FxHashMap::default(),
            stats: IncrementalStats::default(),
            force_full: false,
        }
    }

    /// Create with pre-allocated capacity.
    ///
    /// `node_cap` pre-sizes both the dependency graph (nodes and edges)
    /// and the result cache, avoiding regrow churn while the widget
    /// tree is being built.
    #[must_use]
    pub fn with_capacity(node_cap: usize) -> Self {
        Self {
            graph: DepGraph::with_capacity(node_cap, node_cap),
            cache: FxHashMap::with_capacity_and_hasher(node_cap, Default::default()),
            stats: IncrementalStats::default(),
            force_full: false,
        }
    }

    /// Create from environment configuration.
    ///
    /// Reads `FRANKENTUI_FULL_LAYOUT`. When set to `"1"`, `"true"`, or
    /// `"yes"` (case-insensitive), force-full mode is enabled and all
    /// `get_or_compute` calls bypass the cache.
    #[must_use]
    pub fn from_env() -> Self {
        // Unset or non-Unicode values disable force-full (unwrap_or(false)).
        let force = std::env::var("FRANKENTUI_FULL_LAYOUT")
            .map(|v| matches!(v.to_ascii_lowercase().as_str(), "1" | "true" | "yes"))
            .unwrap_or(false);
        // NOTE(review): duplicates the body of `new()` — keep in sync.
        Self {
            graph: DepGraph::new(),
            cache: FxHashMap::default(),
            stats: IncrementalStats::default(),
            force_full: force,
        }
    }

    // ── Node Management ─────────────────────────────────────────────

    /// Add a layout node, optionally with a parent.
    ///
    /// When a parent is provided, a dependency edge is added so that
    /// dirtying the parent also dirties this child.
    pub fn add_node(&mut self, parent: Option<NodeId>) -> NodeId {
        let id = self.graph.add_node();
        if let Some(p) = parent {
            self.graph.set_parent(id, p);
            // Child depends on parent: parent dirty → child dirty.
            // The `CycleError` result is deliberately ignored: `id` was
            // just created and has no other edges, so this edge cannot
            // close a cycle.
            let _ = self.graph.add_edge(id, p);
        }
        // New nodes start dirty (no cached result).
        self.graph.mark_dirty(id);
        id
    }

    /// Remove a node and evict its cache entry.
    ///
    /// The parent (if any) is marked dirty: losing a child is a
    /// structural change that invalidates the parent's layout.
    pub fn remove_node(&mut self, id: NodeId) {
        self.cache.remove(&id);
        // If there's a parent, mark it dirty (layout changed).
        if let Some(parent) = self.graph.parent(id) {
            self.graph.mark_dirty(parent);
        }
        self.graph.remove_node(id);
    }

    /// Add a custom dependency edge: `from` depends on `to`.
    ///
    /// Use this for bidirectional flex-sibling relationships or
    /// cross-subtree dependencies.
    ///
    /// # Errors
    ///
    /// Returns [`CycleError`] if the edge would create a cycle.
    pub fn add_dependency(&mut self, from: NodeId, to: NodeId) -> Result<(), CycleError> {
        self.graph.add_edge(from, to)
    }

    // ── Dirty Tracking ──────────────────────────────────────────────

    /// Mark a node's input as changed (hash-deduplicated).
    ///
    /// The node will not be dirtied if `new_hash` matches the stored
    /// hash for this `InputKind`.
    pub fn mark_changed(&mut self, id: NodeId, kind: InputKind, new_hash: u64) {
        self.graph.mark_changed(id, kind, new_hash);
    }

    /// Force-mark a node as dirty without hash comparison.
    pub fn mark_dirty(&mut self, id: NodeId) {
        self.graph.mark_dirty(id);
    }

    /// Mark a node dirty and also dirty all its ancestors up to the
    /// root. This implements the "flex sibling" pattern: when a child
    /// changes, its parent must recompute, which propagates to all
    /// siblings via parent→child edges.
    pub fn mark_dirty_with_ancestors(&mut self, id: NodeId) {
        self.graph.mark_dirty(id);
        // Walk the parent chain; terminates at the root (parent == None).
        let mut current = id;
        while let Some(parent) = self.graph.parent(current) {
            self.graph.mark_dirty(parent);
            current = parent;
        }
    }

    /// Propagate dirtiness from pending nodes to all transitive
    /// dependents. Must be called before [`get_or_compute`](Self::get_or_compute).
    ///
    /// Returns the dirty set in DFS pre-order.
    pub fn propagate(&mut self) -> Vec<NodeId> {
        self.graph.propagate()
    }

    /// Check if a node is currently dirty.
    #[must_use]
    pub fn is_dirty(&self, id: NodeId) -> bool {
        self.graph.is_dirty(id)
    }

    // ── Layout Computation ──────────────────────────────────────────

    /// Get the cached layout for a node, or compute and cache it.
    ///
    /// A cache hit requires:
    /// 1. The node is not dirty.
    /// 2. The area matches the previously cached area.
    /// 3. `force_full` is not enabled.
    ///
    /// On a cache miss, `compute` is called with the `area` and the
    /// result is stored. The node is cleaned after computation.
    pub fn get_or_compute<F>(&mut self, id: NodeId, area: Rect, compute: F) -> Vec<Rect>
    where
        F: FnOnce(Rect) -> Vec<Rect>,
    {
        self.stats.total += 1;

        // Let-chain guard: all three hit conditions in one test.
        if !self.force_full
            && !self.graph.is_dirty(id)
            && let Some(cached) = self.cache.get(&id)
            && cached.area == area
        {
            self.stats.cached += 1;
            return cached.rects.clone();
        }

        // Cache miss or dirty: recompute.
        let rects = compute(area);
        let result_hash = hash_rects(&rects);
        self.cache.insert(
            id,
            CachedNodeLayout {
                area,
                rects: rects.clone(),
                result_hash,
            },
        );
        // Clean AFTER storing, so the next same-area query is a hit.
        self.graph.clean(id);
        self.stats.recomputed += 1;
        rects
    }

    /// Retrieve the cached result for a node without recomputing.
    ///
    /// Returns `None` if the node has no cached result.
    #[must_use]
    pub fn cached_rects(&self, id: NodeId) -> Option<&[Rect]> {
        self.cache.get(&id).map(|c| c.rects.as_slice())
    }

    /// Whether a node should be treated as having a "changed" result.
    ///
    /// NOTE(review): despite the name, this currently returns `true`
    /// only when the node has *no* cached entry (new node, or entry
    /// evicted). The cache stores only the latest `result_hash`, so a
    /// comparison against the previous pass's hash is not possible with
    /// the current data model — confirm whether callers expect the
    /// stronger "hash differs from last pass" semantics.
    #[must_use]
    pub fn result_changed(&self, id: NodeId) -> bool {
        // If there's no cached entry, it's "changed" (new).
        !self.cache.contains_key(&id)
    }

    /// Get the hash of a node's last computed result.
    #[must_use]
    pub fn result_hash(&self, id: NodeId) -> Option<u64> {
        self.cache.get(&id).map(|c| c.result_hash)
    }

    // ── Configuration ───────────────────────────────────────────────

    /// Enable or disable force-full mode.
    ///
    /// When enabled, every `get_or_compute` call recomputes from
    /// scratch, ignoring the cache. Useful for debugging or as a
    /// fallback path.
    pub fn set_force_full(&mut self, force: bool) {
        self.force_full = force;
    }

    /// Whether force-full mode is enabled.
    #[must_use]
    pub fn force_full(&self) -> bool {
        self.force_full
    }

    // ── Statistics ──────────────────────────────────────────────────

    /// Current-pass statistics.
    ///
    /// `cache_entries` is snapshotted here; the counters cover every
    /// call since the last [`reset_stats`](Self::reset_stats).
    #[must_use]
    pub fn stats(&self) -> IncrementalStats {
        IncrementalStats {
            cache_entries: self.cache.len(),
            ..self.stats.clone()
        }
    }

    /// Reset per-pass statistics to zero.
    pub fn reset_stats(&mut self) {
        self.stats = IncrementalStats::default();
    }

    // ── Bulk Operations ─────────────────────────────────────────────

    /// Mark all nodes clean and advance the generation.
    pub fn clean_all(&mut self) {
        self.graph.clean_all();
    }

    /// Mark all nodes dirty (forces full recomputation on next pass).
    /// The cache is kept; entries are overwritten on recompute.
    pub fn invalidate_all(&mut self) {
        self.graph.invalidate_all();
    }

    /// Clear the entire result cache (frees memory).
    /// Note: does NOT dirty the nodes; pair with `invalidate_all` to
    /// also force recomputation.
    pub fn clear_cache(&mut self) {
        self.cache.clear();
    }

    // ── Introspection ───────────────────────────────────────────────

    /// Borrow the underlying dependency graph.
    #[must_use]
    pub fn graph(&self) -> &DepGraph {
        &self.graph
    }

    /// Mutable access to the dependency graph.
    pub fn graph_mut(&mut self) -> &mut DepGraph {
        &mut self.graph
    }

    /// Number of cached entries.
    #[must_use]
    pub fn cache_len(&self) -> usize {
        self.cache.len()
    }

    /// Number of live nodes in the dependency graph.
    #[must_use]
    pub fn node_count(&self) -> usize {
        self.graph.node_count()
    }
}
396
impl Default for IncrementalLayout {
    /// Equivalent to [`IncrementalLayout::new`].
    fn default() -> Self {
        Self::new()
    }
}
402
// Manual Debug: summarizes counts and mode instead of dumping the
// whole graph and cache (which could be large and is not `Debug`-
// friendly for terminal output).
impl std::fmt::Debug for IncrementalLayout {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("IncrementalLayout")
            .field("nodes", &self.graph.node_count())
            .field("cache_entries", &self.cache.len())
            .field("force_full", &self.force_full)
            .finish()
    }
}
412
413// ============================================================================
414// Tests
415// ============================================================================
416
417#[cfg(test)]
418mod tests {
419    use super::*;
420
421    /// Helper: build a simple binary tree of depth `d`.
422    /// Returns (inc, root, leaves).
423    fn binary_tree(depth: u32) -> (IncrementalLayout, NodeId, Vec<NodeId>) {
424        let node_count = (1u32 << (depth + 1)) - 1;
425        let mut inc = IncrementalLayout::with_capacity(node_count as usize);
426        let root = inc.add_node(None);
427
428        let mut current_level = vec![root];
429        let mut leaves = Vec::new();
430
431        for _ in 0..depth {
432            let mut next_level = Vec::new();
433            for &parent in &current_level {
434                let left = inc.add_node(Some(parent));
435                let right = inc.add_node(Some(parent));
436                next_level.push(left);
437                next_level.push(right);
438            }
439            current_level = next_level;
440        }
441        leaves.extend(current_level);
442
443        (inc, root, leaves)
444    }
445
446    /// Helper: build a flat tree (root + N children).
447    fn flat_tree(n: usize) -> (IncrementalLayout, NodeId, Vec<NodeId>) {
448        let mut inc = IncrementalLayout::with_capacity(n + 1);
449        let root = inc.add_node(None);
450        let children: Vec<_> = (0..n).map(|_| inc.add_node(Some(root))).collect();
451        (inc, root, children)
452    }
453
    /// Shorthand: a `Rect` anchored at the origin with the given size.
    fn area(w: u16, h: u16) -> Rect {
        Rect::new(0, 0, w, h)
    }
457
458    fn split_equal(a: Rect, n: usize) -> Vec<Rect> {
459        if n == 0 {
460            return vec![];
461        }
462        let w = a.width / n as u16;
463        (0..n)
464            .map(|i| Rect::new(a.x + (i as u16) * w, a.y, w, a.height))
465            .collect()
466    }
467
468    // ── Basic ───────────────────────────────────────────────────────
469
470    #[test]
471    fn new_node_is_dirty() {
472        let mut inc = IncrementalLayout::new();
473        let n = inc.add_node(None);
474        assert!(inc.is_dirty(n));
475    }
476
477    #[test]
478    fn get_or_compute_caches() {
479        let mut inc = IncrementalLayout::new();
480        let n = inc.add_node(None);
481        inc.propagate();
482
483        let a = area(80, 24);
484        let mut calls = 0u32;
485        let r1 = inc.get_or_compute(n, a, |a| {
486            calls += 1;
487            split_equal(a, 2)
488        });
489
490        // Second call should be cached.
491        let r2 = inc.get_or_compute(n, a, |a| {
492            calls += 1;
493            split_equal(a, 2)
494        });
495
496        assert_eq!(r1, r2);
497        assert_eq!(calls, 1);
498    }
499
500    #[test]
501    fn dirty_node_recomputes() {
502        let mut inc = IncrementalLayout::new();
503        let n = inc.add_node(None);
504        inc.propagate();
505
506        let a = area(80, 24);
507        inc.get_or_compute(n, a, |a| split_equal(a, 2));
508
509        // Mark dirty → recompute.
510        inc.mark_dirty(n);
511        inc.propagate();
512
513        let mut calls = 0u32;
514        inc.get_or_compute(n, a, |a| {
515            calls += 1;
516            split_equal(a, 2)
517        });
518        assert_eq!(calls, 1);
519    }
520
521    #[test]
522    fn area_change_triggers_recompute() {
523        let mut inc = IncrementalLayout::new();
524        let n = inc.add_node(None);
525        inc.propagate();
526
527        inc.get_or_compute(n, area(80, 24), |a| split_equal(a, 2));
528
529        // Different area → must recompute even though node is clean.
530        let mut calls = 0u32;
531        inc.get_or_compute(n, area(120, 40), |a| {
532            calls += 1;
533            split_equal(a, 2)
534        });
535        assert_eq!(calls, 1);
536    }
537
538    #[test]
539    fn same_area_same_node_cached() {
540        let mut inc = IncrementalLayout::new();
541        let n = inc.add_node(None);
542        inc.propagate();
543
544        let a = area(80, 24);
545        inc.get_or_compute(n, a, |a| split_equal(a, 2));
546
547        // Same area, clean node → cache hit.
548        let mut calls = 0u32;
549        inc.get_or_compute(n, a, |_a| {
550            calls += 1;
551            vec![]
552        });
553        assert_eq!(calls, 0);
554    }
555
556    // ── Propagation ─────────────────────────────────────────────────
557
558    #[test]
559    fn dirty_parent_dirties_child() {
560        let mut inc = IncrementalLayout::new();
561        let root = inc.add_node(None);
562        let child = inc.add_node(Some(root));
563        inc.propagate();
564
565        let a = area(80, 24);
566        inc.get_or_compute(root, a, |a| split_equal(a, 1));
567        inc.get_or_compute(child, a, |a| split_equal(a, 2));
568
569        // Dirty root → child should also recompute.
570        inc.mark_dirty(root);
571        inc.propagate();
572
573        assert!(inc.is_dirty(root));
574        assert!(inc.is_dirty(child));
575    }
576
577    #[test]
578    fn clean_sibling_not_affected_by_dirty_sibling() {
579        let (mut inc, root, children) = flat_tree(3);
580        inc.propagate();
581
582        let a = area(120, 24);
583        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
584        for (i, &c) in children.iter().enumerate() {
585            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
586        }
587
588        // Mark only child[1] dirty.
589        inc.mark_dirty(children[1]);
590        inc.propagate();
591
592        assert!(!inc.is_dirty(root));
593        assert!(!inc.is_dirty(children[0]));
594        assert!(inc.is_dirty(children[1]));
595        assert!(!inc.is_dirty(children[2]));
596    }
597
598    #[test]
599    fn flex_siblings_dirty_via_parent() {
600        let (mut inc, root, children) = flat_tree(3);
601        inc.propagate();
602
603        let a = area(120, 24);
604        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
605        for (i, &c) in children.iter().enumerate() {
606            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
607        }
608
609        // Flex coupling: when a child changes, mark the parent dirty.
610        // Since parent→child edges exist, parent dirty → all children dirty.
611        inc.mark_dirty(children[1]);
612        inc.mark_dirty(root); // Flex coupling: parent must recompute.
613        inc.propagate();
614
615        assert!(inc.is_dirty(root));
616        assert!(inc.is_dirty(children[0]));
617        assert!(inc.is_dirty(children[1]));
618        assert!(inc.is_dirty(children[2]));
619    }
620
621    // ── Statistics ──────────────────────────────────────────────────
622
623    #[test]
624    fn stats_track_hits_and_misses() {
625        let (mut inc, root, children) = flat_tree(3);
626        inc.propagate();
627
628        let a = area(120, 24);
629        // First pass: all misses.
630        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
631        for (i, &c) in children.iter().enumerate() {
632            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
633        }
634
635        let s = inc.stats();
636        assert_eq!(s.recomputed, 4);
637        assert_eq!(s.cached, 0);
638        assert_eq!(s.total, 4);
639
640        inc.reset_stats();
641
642        // Second pass: all hits.
643        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
644        for (i, &c) in children.iter().enumerate() {
645            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
646        }
647
648        let s = inc.stats();
649        assert_eq!(s.recomputed, 0);
650        assert_eq!(s.cached, 4);
651        assert_eq!(s.total, 4);
652        assert!((s.hit_rate() - 1.0).abs() < 0.001);
653    }
654
655    #[test]
656    fn stats_partial_dirty() {
657        let (mut inc, root, children) = flat_tree(4);
658        inc.propagate();
659
660        let a = area(160, 24);
661        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 4));
662        for (i, &c) in children.iter().enumerate() {
663            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
664        }
665        inc.reset_stats();
666
667        // Dirty one child.
668        inc.mark_dirty(children[2]);
669        inc.propagate();
670
671        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 4));
672        for (i, &c) in children.iter().enumerate() {
673            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
674        }
675
676        let s = inc.stats();
677        assert_eq!(s.recomputed, 1);
678        assert_eq!(s.cached, 4); // root + 3 clean children
679        assert_eq!(s.total, 5);
680    }
681
682    // ── Force Full ──────────────────────────────────────────────────
683
684    #[test]
685    fn force_full_bypasses_cache() {
686        let mut inc = IncrementalLayout::new();
687        let n = inc.add_node(None);
688        inc.propagate();
689
690        let a = area(80, 24);
691        inc.get_or_compute(n, a, |a| split_equal(a, 2));
692
693        // Enable force-full.
694        inc.set_force_full(true);
695        assert!(inc.force_full());
696
697        let mut calls = 0u32;
698        inc.get_or_compute(n, a, |a| {
699            calls += 1;
700            split_equal(a, 2)
701        });
702        assert_eq!(calls, 1);
703    }
704
705    #[test]
706    fn force_full_produces_identical_results() {
707        let (mut inc, root, children) = flat_tree(3);
708        inc.propagate();
709
710        let a = area(120, 24);
711
712        // Incremental pass.
713        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
714        let mut child_rects_inc = Vec::new();
715        for (i, &c) in children.iter().enumerate() {
716            child_rects_inc.push(inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 2)));
717        }
718
719        // Force-full pass.
720        inc.set_force_full(true);
721        inc.reset_stats();
722
723        let root_rects_full = inc.get_or_compute(root, a, |a| split_equal(a, 3));
724        let mut child_rects_full = Vec::new();
725        for (i, &c) in children.iter().enumerate() {
726            child_rects_full.push(inc.get_or_compute(c, root_rects_full[i], |a| split_equal(a, 2)));
727        }
728
729        assert_eq!(root_rects, root_rects_full);
730        assert_eq!(child_rects_inc, child_rects_full);
731    }
732
733    // ── Node Removal ────────────────────────────────────────────────
734
735    #[test]
736    fn remove_node_evicts_cache() {
737        let mut inc = IncrementalLayout::new();
738        let root = inc.add_node(None);
739        let child = inc.add_node(Some(root));
740        inc.propagate();
741
742        inc.get_or_compute(child, area(40, 24), |a| split_equal(a, 1));
743        assert!(inc.cached_rects(child).is_some());
744
745        inc.remove_node(child);
746        assert!(inc.cached_rects(child).is_none());
747    }
748
749    #[test]
750    fn remove_node_dirties_parent() {
751        let mut inc = IncrementalLayout::new();
752        let root = inc.add_node(None);
753        let child = inc.add_node(Some(root));
754        inc.propagate();
755
756        let a = area(80, 24);
757        inc.get_or_compute(root, a, |a| split_equal(a, 1));
758        inc.get_or_compute(child, a, |a| split_equal(a, 1));
759
760        // Remove child → root should be dirty.
761        inc.remove_node(child);
762        assert!(inc.is_dirty(root));
763    }
764
765    // ── Bulk Operations ─────────────────────────────────────────────
766
767    #[test]
768    fn invalidate_all_forces_recompute() {
769        let (mut inc, root, children) = flat_tree(3);
770        inc.propagate();
771
772        let a = area(120, 24);
773        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
774        for (i, &c) in children.iter().enumerate() {
775            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
776        }
777
778        inc.invalidate_all();
779        inc.propagate();
780
781        assert!(inc.is_dirty(root));
782        for &c in &children {
783            assert!(inc.is_dirty(c));
784        }
785    }
786
787    #[test]
788    fn clean_all_resets_dirty() {
789        let (mut inc, root, children) = flat_tree(2);
790        inc.propagate();
791
792        // All should be clean after propagate + compute.
793        let a = area(80, 24);
794        inc.get_or_compute(root, a, |a| split_equal(a, 2));
795        for (i, &c) in children.iter().enumerate() {
796            let child_area = Rect::new(i as u16 * 40, 0, 40, 24);
797            inc.get_or_compute(c, child_area, |a| split_equal(a, 1));
798        }
799
800        inc.mark_dirty(root);
801        inc.propagate();
802        assert!(inc.is_dirty(root));
803
804        inc.clean_all();
805        assert!(!inc.is_dirty(root));
806    }
807
808    #[test]
809    fn clear_cache_frees_memory() {
810        let (mut inc, root, _children) = flat_tree(5);
811        inc.propagate();
812
813        let a = area(200, 24);
814        inc.get_or_compute(root, a, |a| split_equal(a, 5));
815        assert!(inc.cache_len() > 0);
816
817        inc.clear_cache();
818        assert_eq!(inc.cache_len(), 0);
819    }
820
821    // ── mark_dirty_with_ancestors ─────────────────────────────────
822
823    #[test]
824    fn mark_dirty_with_ancestors_propagates_to_siblings() {
825        // Root → 3 children. Dirtying child[1] with ancestors
826        // should dirty root, which propagates to all children.
827        let (mut inc, root, children) = flat_tree(3);
828        inc.propagate();
829
830        let a = area(120, 24);
831        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 3));
832        for (i, &c) in children.iter().enumerate() {
833            inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 1));
834        }
835
836        inc.mark_dirty_with_ancestors(children[1]);
837        inc.propagate();
838
839        assert!(inc.is_dirty(root));
840        assert!(inc.is_dirty(children[0]));
841        assert!(inc.is_dirty(children[1]));
842        assert!(inc.is_dirty(children[2]));
843    }
844
845    #[test]
846    fn mark_dirty_with_ancestors_deep_chain() {
847        // Chain: root → A → B → C. Dirty C with ancestors → all dirty.
848        let mut inc = IncrementalLayout::new();
849        let root = inc.add_node(None);
850        let a = inc.add_node(Some(root));
851        let b = inc.add_node(Some(a));
852        let c = inc.add_node(Some(b));
853        inc.propagate();
854
855        let area_ = area(80, 24);
856        inc.get_or_compute(root, area_, |a| split_equal(a, 1));
857        inc.get_or_compute(a, area_, |a| split_equal(a, 1));
858        inc.get_or_compute(b, area_, |a| split_equal(a, 1));
859        inc.get_or_compute(c, area_, |a| split_equal(a, 1));
860
861        inc.mark_dirty_with_ancestors(c);
862        inc.propagate();
863
864        assert!(inc.is_dirty(root));
865        assert!(inc.is_dirty(a));
866        assert!(inc.is_dirty(b));
867        assert!(inc.is_dirty(c));
868    }
869
870    // ── Mark Changed Deduplication ──────────────────────────────────
871
872    #[test]
873    fn mark_changed_deduplicates() {
874        let mut inc = IncrementalLayout::new();
875        let n = inc.add_node(None);
876        inc.propagate();
877
878        let a = area(80, 24);
879        inc.get_or_compute(n, a, |a| split_equal(a, 2));
880
881        // First change.
882        inc.mark_changed(n, InputKind::Constraint, 42);
883        inc.propagate();
884        assert!(inc.is_dirty(n));
885
886        inc.get_or_compute(n, a, |a| split_equal(a, 2));
887
888        // Same hash → no-op.
889        inc.mark_changed(n, InputKind::Constraint, 42);
890        inc.propagate();
891        assert!(!inc.is_dirty(n));
892    }
893
894    // ── Deep Tree ───────────────────────────────────────────────────
895
896    #[test]
897    fn deep_tree_partial_dirty() {
898        let (mut inc, root, leaves) = binary_tree(4);
899        inc.propagate();
900
901        // Full pass.
902        fn walk(inc: &mut IncrementalLayout, id: NodeId, a: Rect) {
903            let rects = inc.get_or_compute(id, a, |a| split_equal(a, 2));
904            // Walk children (dependents whose parent is `id`).
905            let deps: Vec<_> = inc.graph().dependents(id).to_vec();
906            for (i, child) in deps.iter().enumerate() {
907                if i < rects.len() {
908                    walk(inc, *child, rects[i]);
909                }
910            }
911        }
912
913        walk(&mut inc, root, area(160, 24));
914        inc.reset_stats();
915
916        // Dirty one leaf → only that leaf recomputes.
917        inc.mark_dirty(leaves[7]);
918        inc.propagate();
919
920        walk(&mut inc, root, area(160, 24));
921
922        let s = inc.stats();
923        assert_eq!(s.recomputed, 1);
924        assert!(s.cached > 0);
925    }
926
927    // ── Bit-Identical ───────────────────────────────────────────────
928
929    #[test]
930    fn incremental_equals_full_layout() {
931        let a = area(200, 60);
932
933        // Build tree: root → 5 children, each with 3 grandchildren.
934        let mut inc = IncrementalLayout::new();
935        let root = inc.add_node(None);
936        let mut children = Vec::new();
937        let mut grandchildren = Vec::new();
938        for _ in 0..5 {
939            let child = inc.add_node(Some(root));
940            children.push(child);
941            for _ in 0..3 {
942                let gc = inc.add_node(Some(child));
943                grandchildren.push(gc);
944            }
945        }
946        inc.propagate();
947
948        // Define a deterministic compute function.
949        let compute = |a: Rect, n: usize| -> Vec<Rect> { split_equal(a, n) };
950
951        // Incremental pass.
952        let root_rects = inc.get_or_compute(root, a, |a| compute(a, 5));
953        let mut child_rects = Vec::new();
954        let mut gc_rects = Vec::new();
955        for (i, &c) in children.iter().enumerate() {
956            let cr = inc.get_or_compute(c, root_rects[i], |a| compute(a, 3));
957            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
958            for (j, &gc) in deps.iter().enumerate() {
959                if j < cr.len() {
960                    gc_rects.push(inc.get_or_compute(gc, cr[j], |a| compute(a, 1)));
961                }
962            }
963            child_rects.push(cr);
964        }
965
966        // Force-full pass.
967        inc.set_force_full(true);
968        inc.reset_stats();
969
970        let root_rects2 = inc.get_or_compute(root, a, |a| compute(a, 5));
971        let mut child_rects2 = Vec::new();
972        let mut gc_rects2 = Vec::new();
973        for (i, &c) in children.iter().enumerate() {
974            let cr = inc.get_or_compute(c, root_rects2[i], |a| compute(a, 3));
975            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
976            for (j, &gc) in deps.iter().enumerate() {
977                if j < cr.len() {
978                    gc_rects2.push(inc.get_or_compute(gc, cr[j], |a| compute(a, 1)));
979                }
980            }
981            child_rects2.push(cr);
982        }
983
984        assert_eq!(root_rects, root_rects2);
985        assert_eq!(child_rects, child_rects2);
986        assert_eq!(gc_rects, gc_rects2);
987    }
988
989    // ── Edge Cases ──────────────────────────────────────────────────
990
991    #[test]
992    fn empty_graph() {
993        let inc = IncrementalLayout::new();
994        assert_eq!(inc.node_count(), 0);
995        assert_eq!(inc.cache_len(), 0);
996    }
997
998    #[test]
999    fn single_node_graph() {
1000        let mut inc = IncrementalLayout::new();
1001        let n = inc.add_node(None);
1002        inc.propagate();
1003
1004        let r = inc.get_or_compute(n, area(80, 24), |_a| vec![]);
1005        assert!(r.is_empty());
1006
1007        let s = inc.stats();
1008        assert_eq!(s.total, 1);
1009        assert_eq!(s.recomputed, 1);
1010    }
1011
1012    #[test]
1013    fn zero_area_still_caches() {
1014        let mut inc = IncrementalLayout::new();
1015        let n = inc.add_node(None);
1016        inc.propagate();
1017
1018        let a = Rect::default(); // 0×0
1019        inc.get_or_compute(n, a, |_| vec![]);
1020
1021        let mut calls = 0u32;
1022        inc.get_or_compute(n, a, |_| {
1023            calls += 1;
1024            vec![]
1025        });
1026        assert_eq!(calls, 0);
1027    }
1028
1029    #[test]
1030    fn result_hash_consistent() {
1031        let mut inc = IncrementalLayout::new();
1032        let n = inc.add_node(None);
1033        inc.propagate();
1034
1035        let a = area(80, 24);
1036        inc.get_or_compute(n, a, |a| split_equal(a, 2));
1037
1038        let h1 = inc.result_hash(n).unwrap();
1039
1040        // Recompute with same inputs → same hash.
1041        inc.mark_dirty(n);
1042        inc.propagate();
1043        inc.get_or_compute(n, a, |a| split_equal(a, 2));
1044
1045        let h2 = inc.result_hash(n).unwrap();
1046        assert_eq!(h1, h2);
1047    }
1048
1049    #[test]
1050    fn debug_format() {
1051        let inc = IncrementalLayout::new();
1052        let dbg = format!("{:?}", inc);
1053        assert!(dbg.contains("IncrementalLayout"));
1054        assert!(dbg.contains("nodes"));
1055    }
1056
1057    #[test]
1058    fn default_impl() {
1059        let inc = IncrementalLayout::default();
1060        assert_eq!(inc.node_count(), 0);
1061        assert!(!inc.force_full());
1062    }
1063
1064    #[test]
1065    fn from_env_default_is_not_force_full() {
1066        // We can't safely set_var in forbid(unsafe_code) crate, so test
1067        // that the default (unset) path doesn't enable force_full.
1068        // The env var may or may not be set in CI; the key assertion is
1069        // that `from_env()` doesn't panic and returns a usable instance.
1070        let inc = IncrementalLayout::from_env();
1071        // If FRANKENTUI_FULL_LAYOUT is not set (common case), force_full is false.
1072        // If it IS set (unlikely in tests), force_full matches the value.
1073        assert_eq!(inc.node_count(), 0);
1074    }
1075
1076    #[test]
1077    fn parse_env_values() {
1078        // Test the parsing logic directly without touching env vars.
1079        let parse =
1080            |s: &str| -> bool { matches!(s.to_ascii_lowercase().as_str(), "1" | "true" | "yes") };
1081        assert!(parse("1"));
1082        assert!(parse("true"));
1083        assert!(parse("TRUE"));
1084        assert!(parse("yes"));
1085        assert!(parse("YES"));
1086        assert!(!parse("0"));
1087        assert!(!parse("false"));
1088        assert!(!parse("no"));
1089        assert!(!parse(""));
1090    }
1091
1092    // ── Large-Scale ─────────────────────────────────────────────────
1093
1094    #[test]
1095    fn thousand_node_tree_partial_dirty() {
1096        // Root → 10 children → 10 grandchildren each (= 111 nodes).
1097        let mut inc = IncrementalLayout::with_capacity(111);
1098        let root = inc.add_node(None);
1099        let mut children = Vec::new();
1100        let mut grandchildren = Vec::new();
1101
1102        for _ in 0..10 {
1103            let child = inc.add_node(Some(root));
1104            children.push(child);
1105            for _ in 0..10 {
1106                let gc = inc.add_node(Some(child));
1107                grandchildren.push(gc);
1108            }
1109        }
1110        inc.propagate();
1111
1112        let a = area(200, 60);
1113
1114        // Full first pass.
1115        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 10));
1116        for (i, &c) in children.iter().enumerate() {
1117            let cr = inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 10));
1118            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
1119            for (j, &gc) in deps.iter().enumerate() {
1120                if j < cr.len() {
1121                    inc.get_or_compute(gc, cr[j], |a| split_equal(a, 1));
1122                }
1123            }
1124        }
1125
1126        let s = inc.stats();
1127        assert_eq!(s.recomputed, 111);
1128        assert_eq!(s.cached, 0);
1129        inc.reset_stats();
1130
1131        // Dirty only 2 grandchildren (< 2% of 111 nodes).
1132        inc.mark_dirty(grandchildren[17]);
1133        inc.mark_dirty(grandchildren[83]);
1134        inc.propagate();
1135
1136        // Second pass.
1137        let root_rects = inc.get_or_compute(root, a, |a| split_equal(a, 10));
1138        for (i, &c) in children.iter().enumerate() {
1139            let cr = inc.get_or_compute(c, root_rects[i], |a| split_equal(a, 10));
1140            let deps: Vec<_> = inc.graph().dependents(c).to_vec();
1141            for (j, &gc) in deps.iter().enumerate() {
1142                if j < cr.len() {
1143                    inc.get_or_compute(gc, cr[j], |a| split_equal(a, 1));
1144                }
1145            }
1146        }
1147
1148        let s = inc.stats();
1149        assert_eq!(s.recomputed, 2);
1150        assert_eq!(s.cached, 109);
1151        assert!(s.hit_rate() > 0.95);
1152    }
1153}