ruvector_mincut/tree/
mod.rs

1//! Hierarchical Tree Decomposition for Dynamic Minimum Cut
2//!
3//! Maintains a hierarchy of graph partitions where each level contains
4//! increasingly refined cuts. Enables subpolynomial update time.
5//!
6//! # Overview
7//!
8//! This module implements a hierarchical decomposition of graphs for efficient
9//! minimum cut maintenance. The key idea is to build a balanced binary tree over
10//! the graph vertices, where each node represents a potential partition of the graph.
11//!
12//! # Features
13//!
14//! - **Balanced Binary Tree**: O(log n) height decomposition
15//! - **Lazy Recomputation**: Only recompute dirty nodes after updates
16//! - **LCA-based Updates**: Localize updates to affected subtrees
17//! - **Multiple Cut Evaluation**: Consider all possible tree-induced partitions
18//!
19//! # Example
20//!
21//! ```rust
22//! use std::sync::Arc;
23//! use ruvector_mincut::graph::DynamicGraph;
24//! use ruvector_mincut::tree::HierarchicalDecomposition;
25//!
26//! // Create a graph
27//! let graph = Arc::new(DynamicGraph::new());
28//! graph.insert_edge(1, 2, 1.0).unwrap();
29//! graph.insert_edge(2, 3, 1.0).unwrap();
30//! graph.insert_edge(3, 1, 1.0).unwrap();
31//!
32//! // Build hierarchical decomposition
33//! let mut decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();
34//!
35//! // Get minimum cut
36//! let min_cut = decomp.min_cut_value();
37//! println!("Minimum cut: {}", min_cut);
38//!
39//! // Get the partition
40//! let (partition_a, partition_b) = decomp.min_cut_partition();
41//! println!("Partition: {:?} vs {:?}", partition_a, partition_b);
42//!
43//! // Handle dynamic updates
44//! graph.insert_edge(1, 4, 2.0).unwrap();
45//! let new_min_cut = decomp.insert_edge(1, 4, 2.0).unwrap();
46//! println!("New minimum cut: {}", new_min_cut);
47//! ```
48//!
49//! # Algorithm
50//!
51//! 1. **Build Phase**: Construct a balanced binary tree over graph vertices
52//! 2. **Compute Phase**: For each node, compute the cut value (edges crossing
53//!    between node's vertices and all other vertices)
54//! 3. **Query Phase**: Return minimum cut over all nodes
55//! 4. **Update Phase**: Mark affected nodes dirty, recompute only dirty subtrees
56//!
57//! # Complexity
58//!
59//! - Build: O(n log n + m) where n = vertices, m = edges
60//! - Query: O(1)
61//! - Update: O(log n) nodes recomputed, O(m') edges examined per node
62//!
63//! # Limitations
64//!
65//! The balanced binary partitioning may not find the true minimum cut if it
66//! requires a partition not represented in the tree structure. For guaranteed
67//! minimum cut finding, use the exact algorithm in the `algorithm` module.
68
69use crate::error::Result;
70use crate::graph::{DynamicGraph, VertexId, Weight};
71use std::collections::{HashMap, HashSet};
72use std::sync::Arc;
73
/// A single node in the hierarchical decomposition tree.
///
/// Leaves wrap one graph vertex each; internal nodes own the union of their
/// children's vertex sets. Every node induces one candidate cut:
/// (this node's vertices) vs. (all remaining graph vertices).
#[derive(Debug, Clone)]
pub struct DecompositionNode {
    /// Unique ID of this node (assigned from the decomposition's counter)
    pub id: usize,
    /// Level in the hierarchy: leaves are always 0; internal nodes store
    /// their depth from the root (root = 1), as assigned by `build_subtree`
    pub level: usize,
    /// All graph vertices contained in this node's subtree
    /// (a single vertex at a leaf; the union of the children's sets otherwise)
    pub vertices: HashSet<VertexId>,
    /// Parent index into the decomposition's node vector (None for the root)
    pub parent: Option<usize>,
    /// Child indices into the decomposition's node vector (empty for leaves)
    pub children: Vec<usize>,
    /// Cached cut value for the partition this node induces
    /// (`f64::INFINITY` until first computed)
    pub cut_value: f64,
    /// Whether `cut_value` is stale and must be recomputed
    pub dirty: bool,
}
92
93impl DecompositionNode {
94    /// Create a new leaf node
95    fn new_leaf(id: usize, vertex: VertexId) -> Self {
96        let mut vertices = HashSet::new();
97        vertices.insert(vertex);
98        Self {
99            id,
100            level: 0,
101            vertices,
102            parent: None,
103            children: Vec::new(),
104            cut_value: f64::INFINITY,
105            dirty: true,
106        }
107    }
108
109    /// Create a new internal node
110    fn new_internal(id: usize, level: usize, children: Vec<usize>) -> Self {
111        Self {
112            id,
113            level,
114            vertices: HashSet::new(), // Will be populated from children
115            parent: None,
116            children,
117            cut_value: f64::INFINITY,
118            dirty: true,
119        }
120    }
121
122    /// Check if this is a leaf node
123    pub fn is_leaf(&self) -> bool {
124        self.children.is_empty()
125    }
126
127    /// Get the size of this subtree (number of vertices)
128    pub fn size(&self) -> usize {
129        self.vertices.len()
130    }
131}
132
/// The hierarchical decomposition tree over a dynamic graph.
///
/// All tree nodes live in one flat `Vec`; parent/child links are indices
/// into that vector. The minimum over every node's induced cut is cached in
/// `min_cut` and refreshed lazily via dirty-marking.
pub struct HierarchicalDecomposition {
    /// All nodes in the decomposition (leaves first, then internal nodes)
    nodes: Vec<DecompositionNode>,
    /// Map from graph vertex to the index of its leaf node in `nodes`
    vertex_to_leaf: HashMap<VertexId, usize>,
    /// Root node index into `nodes` (None for an empty graph)
    root: Option<usize>,
    /// Cached global minimum cut value over all nodes
    min_cut: f64,
    /// Height of the tree (0 for empty or single-vertex graphs)
    height: usize,
    /// Shared handle to the underlying graph being decomposed
    graph: Arc<DynamicGraph>,
    /// Next node ID to hand out when creating nodes
    next_node_id: usize,
}
150
151impl HierarchicalDecomposition {
152    /// Build a new hierarchical decomposition from a graph
153    pub fn build(graph: Arc<DynamicGraph>) -> Result<Self> {
154        let mut decomp = Self {
155            nodes: Vec::new(),
156            vertex_to_leaf: HashMap::new(),
157            root: None,
158            min_cut: f64::INFINITY,
159            height: 0,
160            graph,
161            next_node_id: 0,
162        };
163
164        decomp.build_hierarchy()?;
165        decomp.min_cut = decomp.propagate_updates();
166
167        Ok(decomp)
168    }
169
    /// Get the current minimum cut value.
    ///
    /// O(1): returns the value cached by the most recent recomputation
    /// (`f64::INFINITY` when no node induces a finite cut).
    pub fn min_cut_value(&self) -> f64 {
        self.min_cut
    }
174
175    /// Get the vertices on each side of the minimum cut
176    pub fn min_cut_partition(&self) -> (HashSet<VertexId>, HashSet<VertexId>) {
177        if self.root.is_none() {
178            return (HashSet::new(), HashSet::new());
179        }
180
181        // Find the node with minimum cut value
182        let (min_node_idx, _) = self.find_min_cut_node();
183        let node = &self.nodes[min_node_idx];
184
185        // The partition is: node's vertices vs all other vertices
186        let partition_a = node.vertices.clone();
187        let all_vertices: HashSet<VertexId> = self.graph.vertices().into_iter().collect();
188        let partition_b: HashSet<VertexId> =
189            all_vertices.difference(&partition_a).copied().collect();
190
191        (partition_a, partition_b)
192    }
193
194    /// Handle edge insertion
195    pub fn insert_edge(&mut self, u: VertexId, v: VertexId, _weight: Weight) -> Result<f64> {
196        // Find LCA of u and v
197        if let Some(lca_idx) = self.lca_node(u, v) {
198            // Mark LCA and ancestors as dirty
199            self.mark_dirty(lca_idx);
200        }
201
202        // Recompute and return new min cut
203        self.min_cut = self.propagate_updates();
204        Ok(self.min_cut)
205    }
206
207    /// Handle edge deletion
208    pub fn delete_edge(&mut self, u: VertexId, v: VertexId) -> Result<f64> {
209        // Find LCA of u and v
210        if let Some(lca_idx) = self.lca_node(u, v) {
211            // Mark LCA and ancestors as dirty
212            self.mark_dirty(lca_idx);
213        }
214
215        // Recompute and return new min cut
216        self.min_cut = self.propagate_updates();
217        Ok(self.min_cut)
218    }
219
220    /// Recompute dirty nodes and return new min cut
221    fn propagate_updates(&mut self) -> f64 {
222        // Post-order traversal to recompute dirty nodes
223        if let Some(root_idx) = self.root {
224            self.recompute_subtree(root_idx);
225        }
226
227        // Find global minimum
228        self.find_min_cut_value()
229    }
230
231    /// Recompute a subtree (post-order)
232    fn recompute_subtree(&mut self, node_idx: usize) {
233        // First, recompute children
234        let children = self.nodes[node_idx].children.clone();
235        for child_idx in children {
236            if self.nodes[child_idx].dirty {
237                self.recompute_subtree(child_idx);
238            }
239        }
240
241        // Then recompute this node
242        if self.nodes[node_idx].dirty {
243            let cut_value = self.compute_cut(node_idx);
244            self.nodes[node_idx].cut_value = cut_value;
245            self.nodes[node_idx].dirty = false;
246        }
247    }
248
249    /// Find the lowest common ancestor node of two vertices
250    fn lca_node(&self, u: VertexId, v: VertexId) -> Option<usize> {
251        let u_leaf = self.vertex_to_leaf.get(&u)?;
252        let v_leaf = self.vertex_to_leaf.get(&v)?;
253
254        if u_leaf == v_leaf {
255            return Some(*u_leaf);
256        }
257
258        // Get ancestors of u
259        let mut u_ancestors = HashSet::new();
260        let mut current = Some(*u_leaf);
261        while let Some(node_idx) = current {
262            u_ancestors.insert(node_idx);
263            current = self.nodes[node_idx].parent;
264        }
265
266        // Find first common ancestor of v
267        let mut current = Some(*v_leaf);
268        while let Some(node_idx) = current {
269            if u_ancestors.contains(&node_idx) {
270                return Some(node_idx);
271            }
272            current = self.nodes[node_idx].parent;
273        }
274
275        None
276    }
277
278    /// Mark a node and its ancestors as dirty
279    fn mark_dirty(&mut self, node_idx: usize) {
280        let mut current = Some(node_idx);
281        while let Some(idx) = current {
282            self.nodes[idx].dirty = true;
283            current = self.nodes[idx].parent;
284        }
285    }
286
287    /// Build the initial hierarchy using recursive partitioning
288    fn build_hierarchy(&mut self) -> Result<()> {
289        let vertices = self.graph.vertices();
290
291        if vertices.is_empty() {
292            return Ok(());
293        }
294
295        // Create leaf nodes for each vertex
296        for vertex in &vertices {
297            let node_id = self.next_node_id;
298            self.next_node_id += 1;
299            let leaf = DecompositionNode::new_leaf(node_id, *vertex);
300            let leaf_idx = self.nodes.len();
301            self.nodes.push(leaf);
302            self.vertex_to_leaf.insert(*vertex, leaf_idx);
303        }
304
305        // Build tree using balanced binary partitioning
306        let leaf_indices: Vec<usize> = (0..vertices.len()).collect();
307        if !leaf_indices.is_empty() {
308            self.root = Some(self.build_subtree(&leaf_indices, 1)?);
309        }
310
311        Ok(())
312    }
313
    /// Recursively build a balanced binary tree from a slice of leaf indices.
    ///
    /// `level` is the depth (from the root) of the node being created; the
    /// initial call passes 1. Internal nodes record that depth in `level`,
    /// while leaves always keep `level == 0`, so `level` is NOT monotone
    /// along a root-to-leaf path. `self.height` tracks the edge-count of
    /// the longest root-to-leaf path seen so far.
    ///
    /// Returns the index (into `self.nodes`) of the subtree's root.
    fn build_subtree(&mut self, indices: &[usize], level: usize) -> Result<usize> {
        if indices.len() == 1 {
            // Single leaf node - update its level
            self.nodes[indices[0]].level = 0;
            // A lone leaf sits at depth `level`; height counts edges, hence -1.
            self.height = self.height.max(level - 1);
            return Ok(indices[0]);
        }

        // Split into two balanced halves
        let mid = indices.len() / 2;
        let left_indices = &indices[..mid];
        let right_indices = &indices[mid..];

        // Recursively build children
        let left_idx = self.build_subtree(left_indices, level + 1)?;
        let right_idx = self.build_subtree(right_indices, level + 1)?;

        // Create internal node
        let node_id = self.next_node_id;
        self.next_node_id += 1;
        let mut internal =
            DecompositionNode::new_internal(node_id, level, vec![left_idx, right_idx]);

        // Collect vertices from children
        internal.vertices.extend(&self.nodes[left_idx].vertices);
        internal.vertices.extend(&self.nodes[right_idx].vertices);

        let internal_idx = self.nodes.len();
        self.nodes.push(internal);

        // Update parent pointers
        self.nodes[left_idx].parent = Some(internal_idx);
        self.nodes[right_idx].parent = Some(internal_idx);

        self.height = self.height.max(level);

        Ok(internal_idx)
    }
353
354    /// Compute the cut value at a node
355    /// Each node represents a partition: (node's vertices) vs (all other vertices)
356    fn compute_cut(&self, node_idx: usize) -> f64 {
357        let node = &self.nodes[node_idx];
358
359        // Leaf nodes: partition would be {vertex} vs {all others}
360        // If there's only 1 vertex total, cut is infinite
361        // Otherwise, compute edges from this vertex to all others
362        if node.vertices.len() == self.graph.num_vertices() {
363            // This node contains all vertices - no valid cut
364            return f64::INFINITY;
365        }
366
367        // Compute cut: sum of edge weights crossing between node's vertices and others
368        self.compute_global_cut(&node.vertices)
369    }
370
371    /// Compute the cut value for a partition
372    /// One side is 'vertices', other side is all vertices not in this set
373    fn compute_global_cut(&self, vertices: &HashSet<VertexId>) -> f64 {
374        let mut cut_weight = 0.0;
375
376        for &u in vertices {
377            for (v, _edge_id) in self.graph.neighbors(u) {
378                // If v is NOT in our vertex set, this edge crosses the cut
379                if !vertices.contains(&v) {
380                    if let Some(weight) = self.graph.edge_weight(u, v) {
381                        cut_weight += weight;
382                    }
383                }
384            }
385        }
386
387        cut_weight
388    }
389
390    /// Find the node with minimum cut value
391    fn find_min_cut_node(&self) -> (usize, f64) {
392        let mut min_idx = 0;
393        let mut min_value = f64::INFINITY;
394
395        for (idx, node) in self.nodes.iter().enumerate() {
396            if node.cut_value < min_value {
397                min_value = node.cut_value;
398                min_idx = idx;
399            }
400        }
401
402        (min_idx, min_value)
403    }
404
    /// Find the global minimum cut value: the `.1` of `find_min_cut_node`
    /// (`f64::INFINITY` when no node induces a finite cut).
    fn find_min_cut_value(&self) -> f64 {
        self.find_min_cut_node().1
    }

    /// Get height of the decomposition tree
    /// (0 for empty or single-vertex graphs).
    pub fn height(&self) -> usize {
        self.height
    }

    /// Get the total number of nodes (leaves plus internal nodes).
    pub fn num_nodes(&self) -> usize {
        self.nodes.len()
    }
419}
420
/// Aggregate statistics for one level of the decomposition tree, as
/// produced by `HierarchicalDecomposition::level_info`.
#[derive(Debug, Clone)]
pub struct LevelInfo {
    /// Level index (leaves report 0; internal nodes their depth from the root)
    pub level: usize,
    /// Number of nodes at this level
    pub num_nodes: usize,
    /// Average of the *finite* cut values at this level
    /// (`f64::INFINITY` when every node at the level has an infinite cut)
    pub avg_cut: f64,
}
431
432impl HierarchicalDecomposition {
433    /// Get information about each level
434    pub fn level_info(&self) -> Vec<LevelInfo> {
435        let mut levels: HashMap<usize, Vec<f64>> = HashMap::new();
436
437        for node in &self.nodes {
438            levels
439                .entry(node.level)
440                .or_insert_with(Vec::new)
441                .push(node.cut_value);
442        }
443
444        let mut result: Vec<LevelInfo> = levels
445            .into_iter()
446            .map(|(level, cut_values)| {
447                let num_nodes = cut_values.len();
448                let finite_cuts: Vec<f64> = cut_values
449                    .iter()
450                    .filter(|&&v| v.is_finite())
451                    .copied()
452                    .collect();
453                let avg_cut = if finite_cuts.is_empty() {
454                    f64::INFINITY
455                } else {
456                    finite_cuts.iter().sum::<f64>() / finite_cuts.len() as f64
457                };
458
459                LevelInfo {
460                    level,
461                    num_nodes,
462                    avg_cut,
463                }
464            })
465            .collect();
466
467        result.sort_by_key(|info| info.level);
468        result
469    }
470}
471
#[cfg(test)]
mod tests {
    use super::*;

    /// Triangle 1-2-3-1 with unit edge weights.
    fn create_simple_graph() -> Arc<DynamicGraph> {
        let graph = Arc::new(DynamicGraph::new());
        // Triangle: 1-2-3-1
        graph.insert_edge(1, 2, 1.0).unwrap();
        graph.insert_edge(2, 3, 1.0).unwrap();
        graph.insert_edge(3, 1, 1.0).unwrap();
        graph
    }

    /// Two weight-2 triangles joined by a single weight-1 bridge edge.
    fn create_disconnectable_graph() -> Arc<DynamicGraph> {
        let graph = Arc::new(DynamicGraph::new());
        // Two triangles connected by single edge
        // Triangle 1: 1-2-3-1
        graph.insert_edge(1, 2, 2.0).unwrap();
        graph.insert_edge(2, 3, 2.0).unwrap();
        graph.insert_edge(3, 1, 2.0).unwrap();
        // Bridge
        graph.insert_edge(3, 4, 1.0).unwrap();
        // Triangle 2: 4-5-6-4
        graph.insert_edge(4, 5, 2.0).unwrap();
        graph.insert_edge(5, 6, 2.0).unwrap();
        graph.insert_edge(6, 4, 2.0).unwrap();
        graph
    }

    #[test]
    fn test_build_empty_graph() {
        let graph = Arc::new(DynamicGraph::new());
        let decomp = HierarchicalDecomposition::build(graph).unwrap();
        assert_eq!(decomp.num_nodes(), 0);
        assert_eq!(decomp.height(), 0);
    }

    #[test]
    fn test_build_single_vertex() {
        let graph = Arc::new(DynamicGraph::new());
        graph.add_vertex(1);
        let decomp = HierarchicalDecomposition::build(graph).unwrap();
        assert_eq!(decomp.num_nodes(), 1);
        assert_eq!(decomp.height(), 0);
        assert!(decomp.min_cut_value().is_infinite());
    }

    #[test]
    fn test_build_triangle() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // 3 leaves + 2 internal nodes = 5 total
        assert_eq!(decomp.num_nodes(), 5);

        // Height should be O(log n) = O(log 3) ≈ 2
        assert!(decomp.height() <= 2);

        // Min cut of triangle is 2.0 (any two edges)
        assert_eq!(decomp.min_cut_value(), 2.0);
    }

    #[test]
    fn test_build_disconnectable() {
        let graph = create_disconnectable_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // 6 leaves + 5 internal nodes = 11 total
        assert_eq!(decomp.num_nodes(), 11);

        // Min cut depends on tree structure from balanced partitioning
        // The optimal cut (bridge = 1.0) may not be found due to arbitrary partitioning
        // But it should find some valid cut
        let min_cut = decomp.min_cut_value();
        assert!(min_cut.is_finite() && min_cut >= 1.0);
    }

    #[test]
    fn test_min_cut_partition() {
        let graph = create_disconnectable_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        let (partition_a, partition_b) = decomp.min_cut_partition();

        // Should split into two triangles
        assert_eq!(partition_a.len() + partition_b.len(), 6);

        // Verify partition sizes (should be 3 and 3, or some other split)
        assert!(partition_a.len() >= 1 && partition_a.len() <= 5);
        assert!(partition_b.len() >= 1 && partition_b.len() <= 5);

        // Verify partitions are disjoint
        let intersection: HashSet<_> = partition_a.intersection(&partition_b).collect();
        assert!(intersection.is_empty());
    }

    #[test]
    fn test_lca_node() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // LCA of same vertex is itself
        let lca = decomp.lca_node(1, 1);
        assert!(lca.is_some());

        // LCA of different vertices exists
        let lca = decomp.lca_node(1, 2);
        assert!(lca.is_some());

        let lca = decomp.lca_node(1, 3);
        assert!(lca.is_some());
    }

    #[test]
    fn test_mark_dirty() {
        let graph = create_simple_graph();
        let mut decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Initially all nodes should be clean (after propagate_updates)
        for node in &decomp.nodes {
            assert!(
                !node.dirty,
                "Node {} should not be dirty after build",
                node.id
            );
        }

        // Mark a leaf as dirty
        let leaf_idx = *decomp.vertex_to_leaf.get(&1).unwrap();
        decomp.mark_dirty(leaf_idx);

        // Verify the path to root is marked dirty
        let mut current = Some(leaf_idx);
        while let Some(idx) = current {
            assert!(decomp.nodes[idx].dirty, "Node {} should be dirty", idx);
            current = decomp.nodes[idx].parent;
        }
    }

    #[test]
    fn test_insert_edge() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();

        // Baseline before any mutation; exact value is structure-dependent.
        let _old_min_cut = decomp.min_cut_value();

        // Add edges to a new vertex 4 (creates more connectivity)
        graph.insert_edge(1, 4, 5.0).unwrap();
        graph.insert_edge(2, 4, 5.0).unwrap();

        // Rebuild to get proper baseline
        let mut decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();
        let _baseline = decomp.min_cut_value();

        // Now add another edge
        graph.insert_edge(3, 4, 3.0).unwrap();
        let new_min_cut = decomp.insert_edge(3, 4, 3.0).unwrap();

        // Verify that we got a valid result
        assert!(new_min_cut.is_finite());
    }

    #[test]
    fn test_delete_edge() {
        let graph = create_disconnectable_graph();
        let mut decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();

        let old_min_cut = decomp.min_cut_value();
        assert!(old_min_cut.is_finite());

        // Delete an edge from one triangle
        graph.delete_edge(1, 2).unwrap();
        let new_min_cut = decomp.delete_edge(1, 2).unwrap();

        // After deleting an edge, min cut might change
        // The exact value depends on tree structure
        assert!(new_min_cut.is_finite());
    }

    #[test]
    fn test_level_info() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        let levels = decomp.level_info();

        // Should have levels 0, 1, 2 (leaves at 0, internal at 1 and 2)
        assert!(!levels.is_empty());

        // Verify levels are sorted
        for i in 1..levels.len() {
            assert!(levels[i].level > levels[i - 1].level);
        }

        // Count total nodes
        let total_nodes: usize = levels.iter().map(|l| l.num_nodes).sum();
        assert_eq!(total_nodes, decomp.num_nodes());
    }

    #[test]
    fn test_balanced_tree() {
        let graph = Arc::new(DynamicGraph::new());

        // Create a graph with 15 vertices (should give height ~4)
        for i in 1..=15 {
            graph.add_vertex(i);
        }

        // Add some edges to make it connected
        for i in 1..15 {
            graph.insert_edge(i, i + 1, 1.0).unwrap();
        }

        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Height should be O(log n) = O(log 15) ≈ 4
        assert!(
            decomp.height() <= 4,
            "Height {} should be <= 4",
            decomp.height()
        );

        // Verify balanced: all leaves should be at level 0
        let leaf_count = decomp.nodes.iter().filter(|n| n.level == 0).count();
        assert_eq!(leaf_count, 15);
    }

    #[test]
    fn test_compute_cut() {
        let graph = Arc::new(DynamicGraph::new());

        // Create simple 2-2 bipartite graph
        // Left: 1, 2
        // Right: 3, 4
        // Edges: 1-3 (weight 1), 2-4 (weight 1)
        graph.insert_edge(1, 3, 1.0).unwrap();
        graph.insert_edge(2, 4, 1.0).unwrap();

        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Min cut depends on tree partitioning
        // Could be 0.0 if it partitions {1,3} vs {2,4} (no edges between)
        // Could be 1.0 if it partitions {1} vs {2,3,4} or similar
        // Could be 2.0 if it partitions {1,2} vs {3,4}
        let min_cut = decomp.min_cut_value();
        assert!(min_cut.is_finite() && min_cut <= 2.0);
    }

    #[test]
    fn test_large_tree() {
        let graph = Arc::new(DynamicGraph::new());

        // Create a path graph with 100 vertices
        for i in 1..=100 {
            graph.add_vertex(i);
        }

        for i in 1..100 {
            graph.insert_edge(i, i + 1, 1.0).unwrap();
        }

        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Height should be O(log n) = O(log 100) ≈ 7
        assert!(
            decomp.height() <= 7,
            "Height {} should be <= 7",
            decomp.height()
        );

        // Min cut of a path is 1.0 (any single edge)
        assert_eq!(decomp.min_cut_value(), 1.0);

        // Total nodes: 100 leaves + 99 internal = 199
        assert_eq!(decomp.num_nodes(), 199);
    }

    #[test]
    fn test_propagate_updates() {
        let graph = create_simple_graph();
        let mut decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Mark all nodes as dirty
        for i in 0..decomp.nodes.len() {
            decomp.nodes[i].dirty = true;
        }

        // Propagate updates
        let min_cut = decomp.propagate_updates();

        // All nodes should now be clean
        for node in &decomp.nodes {
            assert!(!node.dirty);
        }

        // Min cut should be correct
        assert_eq!(min_cut, 2.0);
    }
}