// ruvector_mincut/tree/mod.rs

//! Hierarchical Tree Decomposition for Dynamic Minimum Cut
//!
//! Maintains a hierarchy of graph partitions where each level contains
//! increasingly refined cuts. Enables subpolynomial update time.
//!
//! # Overview
//!
//! This module implements a hierarchical decomposition of graphs for efficient
//! minimum cut maintenance. The key idea is to build a balanced binary tree over
//! the graph vertices, where each node represents a potential partition of the graph.
//!
//! # Features
//!
//! - **Balanced Binary Tree**: O(log n) height decomposition
//! - **Lazy Recomputation**: Only recompute dirty nodes after updates
//! - **Localized Updates**: Mark only affected subtrees dirty
//! - **Multiple Cut Evaluation**: Consider all possible tree-induced partitions
//!
//! # Example
//!
//! ```rust
//! use std::sync::Arc;
//! use ruvector_mincut::graph::DynamicGraph;
//! use ruvector_mincut::tree::HierarchicalDecomposition;
//!
//! // Create a graph
//! let graph = Arc::new(DynamicGraph::new());
//! graph.insert_edge(1, 2, 1.0).unwrap();
//! graph.insert_edge(2, 3, 1.0).unwrap();
//! graph.insert_edge(3, 1, 1.0).unwrap();
//!
//! // Build hierarchical decomposition
//! let mut decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();
//!
//! // Get minimum cut
//! let min_cut = decomp.min_cut_value();
//! println!("Minimum cut: {}", min_cut);
//!
//! // Get the partition
//! let (partition_a, partition_b) = decomp.min_cut_partition();
//! println!("Partition: {:?} vs {:?}", partition_a, partition_b);
//!
//! // Handle dynamic updates
//! graph.insert_edge(1, 4, 2.0).unwrap();
//! let new_min_cut = decomp.insert_edge(1, 4, 2.0).unwrap();
//! println!("New minimum cut: {}", new_min_cut);
//! ```
//!
//! # Algorithm
//!
//! 1. **Build Phase**: Construct a balanced binary tree over graph vertices
//! 2. **Compute Phase**: For each node, compute the cut value (edges crossing
//!    between the node's vertices and all other vertices)
//! 3. **Query Phase**: Return minimum cut over all nodes
//! 4. **Update Phase**: Mark affected nodes dirty, recompute only dirty subtrees
//!
//! # Complexity
//!
//! - Build: O(n log n + m) where n = vertices, m = edges
//! - Query: O(1)
//! - Update: O(log n) nodes recomputed, O(m') edges examined per node
//!
//! # Limitations
//!
//! The balanced binary partitioning may not find the true minimum cut if it
//! requires a partition not represented in the tree structure. For guaranteed
//! minimum cut finding, use the exact algorithm in the `algorithm` module.
//! The tree also does not grow dynamically: vertices added to the graph after
//! `build` have no leaf in the tree, so incremental updates ignore them until
//! the decomposition is rebuilt.

69use std::collections::{HashMap, HashSet};
70use std::sync::Arc;
71use crate::graph::{DynamicGraph, VertexId, Weight};
72use crate::error::Result;
73
/// A node in the hierarchical decomposition tree.
///
/// Every node induces one candidate partition of the graph: the vertices in
/// its subtree versus all remaining vertices. `cut_value` caches the total
/// weight of edges crossing that partition.
#[derive(Debug, Clone)]
pub struct DecompositionNode {
    /// Unique ID of this node
    pub id: usize,
    /// Level in the hierarchy (0 = leaves = individual vertices)
    pub level: usize,
    /// Vertices contained in this node's subtree (exactly one at a leaf)
    pub vertices: HashSet<VertexId>,
    /// Parent node index into the decomposition's node vector (None for root)
    pub parent: Option<usize>,
    /// Child node indices into the decomposition's node vector
    pub children: Vec<usize>,
    /// Cached cut value for this node's induced partition
    /// (`f64::INFINITY` while unset or when no valid cut exists)
    pub cut_value: f64,
    /// Whether `cut_value` is stale and needs recomputation
    pub dirty: bool,
}
92
93impl DecompositionNode {
94    /// Create a new leaf node
95    fn new_leaf(id: usize, vertex: VertexId) -> Self {
96        let mut vertices = HashSet::new();
97        vertices.insert(vertex);
98        Self {
99            id,
100            level: 0,
101            vertices,
102            parent: None,
103            children: Vec::new(),
104            cut_value: f64::INFINITY,
105            dirty: true,
106        }
107    }
108
109    /// Create a new internal node
110    fn new_internal(id: usize, level: usize, children: Vec<usize>) -> Self {
111        Self {
112            id,
113            level,
114            vertices: HashSet::new(), // Will be populated from children
115            parent: None,
116            children,
117            cut_value: f64::INFINITY,
118            dirty: true,
119        }
120    }
121
122    /// Check if this is a leaf node
123    pub fn is_leaf(&self) -> bool {
124        self.children.is_empty()
125    }
126
127    /// Get the size of this subtree (number of vertices)
128    pub fn size(&self) -> usize {
129        self.vertices.len()
130    }
131}
132
/// The hierarchical decomposition tree.
///
/// All nodes live in a flat vector; parent/child relationships are stored as
/// indices into `nodes`. Leaves are pushed first, so leaf `i` corresponds to
/// the `i`-th vertex returned by the graph at build time.
pub struct HierarchicalDecomposition {
    /// All nodes in the decomposition (leaves first, then internal nodes)
    nodes: Vec<DecompositionNode>,
    /// Map from graph vertex to the index of its leaf node in `nodes`
    vertex_to_leaf: HashMap<VertexId, usize>,
    /// Root node index (None for an empty decomposition)
    root: Option<usize>,
    /// Cached global minimum cut value over all tree-induced partitions
    min_cut: f64,
    /// Height of the tree (0 for empty or single-vertex graphs)
    height: usize,
    /// Reference to the underlying graph
    graph: Arc<DynamicGraph>,
    /// Next node ID to assign
    next_node_id: usize,
}
150
151impl HierarchicalDecomposition {
152    /// Build a new hierarchical decomposition from a graph
153    pub fn build(graph: Arc<DynamicGraph>) -> Result<Self> {
154        let mut decomp = Self {
155            nodes: Vec::new(),
156            vertex_to_leaf: HashMap::new(),
157            root: None,
158            min_cut: f64::INFINITY,
159            height: 0,
160            graph,
161            next_node_id: 0,
162        };
163
164        decomp.build_hierarchy()?;
165        decomp.min_cut = decomp.propagate_updates();
166
167        Ok(decomp)
168    }
169
170    /// Get the current minimum cut value
171    pub fn min_cut_value(&self) -> f64 {
172        self.min_cut
173    }
174
175    /// Get the vertices on each side of the minimum cut
176    pub fn min_cut_partition(&self) -> (HashSet<VertexId>, HashSet<VertexId>) {
177        if self.root.is_none() {
178            return (HashSet::new(), HashSet::new());
179        }
180
181        // Find the node with minimum cut value
182        let (min_node_idx, _) = self.find_min_cut_node();
183        let node = &self.nodes[min_node_idx];
184
185        // The partition is: node's vertices vs all other vertices
186        let partition_a = node.vertices.clone();
187        let all_vertices: HashSet<VertexId> = self.graph.vertices().into_iter().collect();
188        let partition_b: HashSet<VertexId> = all_vertices
189            .difference(&partition_a)
190            .copied()
191            .collect();
192
193        (partition_a, partition_b)
194    }
195
196    /// Handle edge insertion
197    pub fn insert_edge(&mut self, u: VertexId, v: VertexId, _weight: Weight) -> Result<f64> {
198        // Find LCA of u and v
199        if let Some(lca_idx) = self.lca_node(u, v) {
200            // Mark LCA and ancestors as dirty
201            self.mark_dirty(lca_idx);
202        }
203
204        // Recompute and return new min cut
205        self.min_cut = self.propagate_updates();
206        Ok(self.min_cut)
207    }
208
209    /// Handle edge deletion
210    pub fn delete_edge(&mut self, u: VertexId, v: VertexId) -> Result<f64> {
211        // Find LCA of u and v
212        if let Some(lca_idx) = self.lca_node(u, v) {
213            // Mark LCA and ancestors as dirty
214            self.mark_dirty(lca_idx);
215        }
216
217        // Recompute and return new min cut
218        self.min_cut = self.propagate_updates();
219        Ok(self.min_cut)
220    }
221
222    /// Recompute dirty nodes and return new min cut
223    fn propagate_updates(&mut self) -> f64 {
224        // Post-order traversal to recompute dirty nodes
225        if let Some(root_idx) = self.root {
226            self.recompute_subtree(root_idx);
227        }
228
229        // Find global minimum
230        self.find_min_cut_value()
231    }
232
233    /// Recompute a subtree (post-order)
234    fn recompute_subtree(&mut self, node_idx: usize) {
235        // First, recompute children
236        let children = self.nodes[node_idx].children.clone();
237        for child_idx in children {
238            if self.nodes[child_idx].dirty {
239                self.recompute_subtree(child_idx);
240            }
241        }
242
243        // Then recompute this node
244        if self.nodes[node_idx].dirty {
245            let cut_value = self.compute_cut(node_idx);
246            self.nodes[node_idx].cut_value = cut_value;
247            self.nodes[node_idx].dirty = false;
248        }
249    }
250
251    /// Find the lowest common ancestor node of two vertices
252    fn lca_node(&self, u: VertexId, v: VertexId) -> Option<usize> {
253        let u_leaf = self.vertex_to_leaf.get(&u)?;
254        let v_leaf = self.vertex_to_leaf.get(&v)?;
255
256        if u_leaf == v_leaf {
257            return Some(*u_leaf);
258        }
259
260        // Get ancestors of u
261        let mut u_ancestors = HashSet::new();
262        let mut current = Some(*u_leaf);
263        while let Some(node_idx) = current {
264            u_ancestors.insert(node_idx);
265            current = self.nodes[node_idx].parent;
266        }
267
268        // Find first common ancestor of v
269        let mut current = Some(*v_leaf);
270        while let Some(node_idx) = current {
271            if u_ancestors.contains(&node_idx) {
272                return Some(node_idx);
273            }
274            current = self.nodes[node_idx].parent;
275        }
276
277        None
278    }
279
280    /// Mark a node and its ancestors as dirty
281    fn mark_dirty(&mut self, node_idx: usize) {
282        let mut current = Some(node_idx);
283        while let Some(idx) = current {
284            self.nodes[idx].dirty = true;
285            current = self.nodes[idx].parent;
286        }
287    }
288
289    /// Build the initial hierarchy using recursive partitioning
290    fn build_hierarchy(&mut self) -> Result<()> {
291        let vertices = self.graph.vertices();
292
293        if vertices.is_empty() {
294            return Ok(());
295        }
296
297        // Create leaf nodes for each vertex
298        for vertex in &vertices {
299            let node_id = self.next_node_id;
300            self.next_node_id += 1;
301            let leaf = DecompositionNode::new_leaf(node_id, *vertex);
302            let leaf_idx = self.nodes.len();
303            self.nodes.push(leaf);
304            self.vertex_to_leaf.insert(*vertex, leaf_idx);
305        }
306
307        // Build tree using balanced binary partitioning
308        let leaf_indices: Vec<usize> = (0..vertices.len()).collect();
309        if !leaf_indices.is_empty() {
310            self.root = Some(self.build_subtree(&leaf_indices, 1)?);
311        }
312
313        Ok(())
314    }
315
316    /// Recursively build a balanced binary tree from leaf indices
317    fn build_subtree(&mut self, indices: &[usize], level: usize) -> Result<usize> {
318        if indices.len() == 1 {
319            // Single leaf node - update its level
320            self.nodes[indices[0]].level = 0;
321            self.height = self.height.max(level - 1);
322            return Ok(indices[0]);
323        }
324
325        // Split into two balanced halves
326        let mid = indices.len() / 2;
327        let left_indices = &indices[..mid];
328        let right_indices = &indices[mid..];
329
330        // Recursively build children
331        let left_idx = self.build_subtree(left_indices, level + 1)?;
332        let right_idx = self.build_subtree(right_indices, level + 1)?;
333
334        // Create internal node
335        let node_id = self.next_node_id;
336        self.next_node_id += 1;
337        let mut internal = DecompositionNode::new_internal(
338            node_id,
339            level,
340            vec![left_idx, right_idx],
341        );
342
343        // Collect vertices from children
344        internal.vertices.extend(&self.nodes[left_idx].vertices);
345        internal.vertices.extend(&self.nodes[right_idx].vertices);
346
347        let internal_idx = self.nodes.len();
348        self.nodes.push(internal);
349
350        // Update parent pointers
351        self.nodes[left_idx].parent = Some(internal_idx);
352        self.nodes[right_idx].parent = Some(internal_idx);
353
354        self.height = self.height.max(level);
355
356        Ok(internal_idx)
357    }
358
359    /// Compute the cut value at a node
360    /// Each node represents a partition: (node's vertices) vs (all other vertices)
361    fn compute_cut(&self, node_idx: usize) -> f64 {
362        let node = &self.nodes[node_idx];
363
364        // Leaf nodes: partition would be {vertex} vs {all others}
365        // If there's only 1 vertex total, cut is infinite
366        // Otherwise, compute edges from this vertex to all others
367        if node.vertices.len() == self.graph.num_vertices() {
368            // This node contains all vertices - no valid cut
369            return f64::INFINITY;
370        }
371
372        // Compute cut: sum of edge weights crossing between node's vertices and others
373        self.compute_global_cut(&node.vertices)
374    }
375
376    /// Compute the cut value for a partition
377    /// One side is 'vertices', other side is all vertices not in this set
378    fn compute_global_cut(&self, vertices: &HashSet<VertexId>) -> f64 {
379        let mut cut_weight = 0.0;
380
381        for &u in vertices {
382            for (v, _edge_id) in self.graph.neighbors(u) {
383                // If v is NOT in our vertex set, this edge crosses the cut
384                if !vertices.contains(&v) {
385                    if let Some(weight) = self.graph.edge_weight(u, v) {
386                        cut_weight += weight;
387                    }
388                }
389            }
390        }
391
392        cut_weight
393    }
394
395    /// Find the node with minimum cut value
396    fn find_min_cut_node(&self) -> (usize, f64) {
397        let mut min_idx = 0;
398        let mut min_value = f64::INFINITY;
399
400        for (idx, node) in self.nodes.iter().enumerate() {
401            if node.cut_value < min_value {
402                min_value = node.cut_value;
403                min_idx = idx;
404            }
405        }
406
407        (min_idx, min_value)
408    }
409
410    /// Find global minimum cut value
411    fn find_min_cut_value(&self) -> f64 {
412        self.find_min_cut_node().1
413    }
414
415    /// Get height of decomposition
416    pub fn height(&self) -> usize {
417        self.height
418    }
419
420    /// Get number of nodes
421    pub fn num_nodes(&self) -> usize {
422        self.nodes.len()
423    }
424}
425
/// Per-level summary statistics for the decomposition.
#[derive(Debug, Clone)]
pub struct LevelInfo {
    /// Level index (0 = leaves)
    pub level: usize,
    /// Number of nodes at this level
    pub num_nodes: usize,
    /// Average of the finite cut values at this level
    /// (`f64::INFINITY` if no node at the level has a finite cut)
    pub avg_cut: f64,
}
436
437impl HierarchicalDecomposition {
438    /// Get information about each level
439    pub fn level_info(&self) -> Vec<LevelInfo> {
440        let mut levels: HashMap<usize, Vec<f64>> = HashMap::new();
441
442        for node in &self.nodes {
443            levels.entry(node.level).or_insert_with(Vec::new).push(node.cut_value);
444        }
445
446        let mut result: Vec<LevelInfo> = levels
447            .into_iter()
448            .map(|(level, cut_values)| {
449                let num_nodes = cut_values.len();
450                let finite_cuts: Vec<f64> = cut_values
451                    .iter()
452                    .filter(|&&v| v.is_finite())
453                    .copied()
454                    .collect();
455                let avg_cut = if finite_cuts.is_empty() {
456                    f64::INFINITY
457                } else {
458                    finite_cuts.iter().sum::<f64>() / finite_cuts.len() as f64
459                };
460
461                LevelInfo {
462                    level,
463                    num_nodes,
464                    avg_cut,
465                }
466            })
467            .collect();
468
469        result.sort_by_key(|info| info.level);
470        result
471    }
472}
473
#[cfg(test)]
mod tests {
    use super::*;

    /// Triangle 1-2-3-1 with unit edge weights.
    fn create_simple_graph() -> Arc<DynamicGraph> {
        let graph = Arc::new(DynamicGraph::new());
        graph.insert_edge(1, 2, 1.0).unwrap();
        graph.insert_edge(2, 3, 1.0).unwrap();
        graph.insert_edge(3, 1, 1.0).unwrap();
        graph
    }

    /// Two weight-2 triangles (1-2-3 and 4-5-6) joined by a weight-1 bridge 3-4.
    fn create_disconnectable_graph() -> Arc<DynamicGraph> {
        let graph = Arc::new(DynamicGraph::new());
        // Triangle 1: 1-2-3-1
        graph.insert_edge(1, 2, 2.0).unwrap();
        graph.insert_edge(2, 3, 2.0).unwrap();
        graph.insert_edge(3, 1, 2.0).unwrap();
        // Bridge
        graph.insert_edge(3, 4, 1.0).unwrap();
        // Triangle 2: 4-5-6-4
        graph.insert_edge(4, 5, 2.0).unwrap();
        graph.insert_edge(5, 6, 2.0).unwrap();
        graph.insert_edge(6, 4, 2.0).unwrap();
        graph
    }

    #[test]
    fn test_build_empty_graph() {
        let graph = Arc::new(DynamicGraph::new());
        let decomp = HierarchicalDecomposition::build(graph).unwrap();
        assert_eq!(decomp.num_nodes(), 0);
        assert_eq!(decomp.height(), 0);
    }

    #[test]
    fn test_build_single_vertex() {
        let graph = Arc::new(DynamicGraph::new());
        graph.add_vertex(1);
        let decomp = HierarchicalDecomposition::build(graph).unwrap();
        assert_eq!(decomp.num_nodes(), 1);
        assert_eq!(decomp.height(), 0);
        // A single vertex admits no valid cut.
        assert!(decomp.min_cut_value().is_infinite());
    }

    #[test]
    fn test_build_triangle() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // 3 leaves + 2 internal nodes = 5 total
        assert_eq!(decomp.num_nodes(), 5);

        // Height should be O(log n) = O(log 3) ≈ 2
        assert!(decomp.height() <= 2);

        // Min cut of a unit triangle is 2.0 (one vertex vs the other two)
        assert_eq!(decomp.min_cut_value(), 2.0);
    }

    #[test]
    fn test_build_disconnectable() {
        let graph = create_disconnectable_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // 6 leaves + 5 internal nodes = 11 total
        assert_eq!(decomp.num_nodes(), 11);

        // Min cut depends on tree structure from balanced partitioning.
        // The optimal cut (bridge = 1.0) may not be found due to arbitrary
        // partitioning, but some valid finite cut must be reported.
        let min_cut = decomp.min_cut_value();
        assert!(min_cut.is_finite() && min_cut >= 1.0);
    }

    #[test]
    fn test_min_cut_partition() {
        let graph = create_disconnectable_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        let (partition_a, partition_b) = decomp.min_cut_partition();

        // Together the two sides must cover all six vertices.
        assert_eq!(partition_a.len() + partition_b.len(), 6);

        // Both sides must be proper, non-empty subsets.
        assert!(partition_a.len() >= 1 && partition_a.len() <= 5);
        assert!(partition_b.len() >= 1 && partition_b.len() <= 5);

        // Verify the two sides are disjoint.
        let intersection: HashSet<_> = partition_a.intersection(&partition_b).collect();
        assert!(intersection.is_empty());
    }

    #[test]
    fn test_lca_node() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // LCA of a vertex with itself is its own leaf.
        let lca = decomp.lca_node(1, 1);
        assert!(lca.is_some());

        // LCA of distinct vertices in the same tree always exists.
        let lca = decomp.lca_node(1, 2);
        assert!(lca.is_some());

        let lca = decomp.lca_node(1, 3);
        assert!(lca.is_some());
    }

    #[test]
    fn test_mark_dirty() {
        let graph = create_simple_graph();
        let mut decomp = HierarchicalDecomposition::build(graph).unwrap();

        // After build, propagate_updates has cleaned every node.
        for node in &decomp.nodes {
            assert!(!node.dirty, "Node {} should not be dirty after build", node.id);
        }

        // Mark a leaf as dirty.
        let leaf_idx = *decomp.vertex_to_leaf.get(&1).unwrap();
        decomp.mark_dirty(leaf_idx);

        // The entire path from the leaf to the root must now be dirty.
        let mut current = Some(leaf_idx);
        while let Some(idx) = current {
            assert!(decomp.nodes[idx].dirty, "Node {} should be dirty", idx);
            current = decomp.nodes[idx].parent;
        }
    }

    #[test]
    fn test_insert_edge() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();

        // Baseline on the plain triangle.
        let old_min_cut = decomp.min_cut_value();
        assert!(old_min_cut.is_finite());

        // Attach a fourth vertex with two heavy edges.
        graph.insert_edge(1, 4, 5.0).unwrap();
        graph.insert_edge(2, 4, 5.0).unwrap();

        // Rebuild so vertex 4 gets a leaf in the decomposition.
        let mut decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();
        let baseline = decomp.min_cut_value();
        assert!(baseline.is_finite());

        // Now perform a dynamic update through the decomposition.
        graph.insert_edge(3, 4, 3.0).unwrap();
        let new_min_cut = decomp.insert_edge(3, 4, 3.0).unwrap();

        // The update must yield a valid (finite) cut value.
        assert!(new_min_cut.is_finite());
    }

    #[test]
    fn test_delete_edge() {
        let graph = create_disconnectable_graph();
        let mut decomp = HierarchicalDecomposition::build(graph.clone()).unwrap();

        let old_min_cut = decomp.min_cut_value();
        assert!(old_min_cut.is_finite());

        // Delete an edge from one triangle.
        graph.delete_edge(1, 2).unwrap();
        let new_min_cut = decomp.delete_edge(1, 2).unwrap();

        // After deleting an edge the min cut may change; the exact value
        // depends on the tree structure, but it must remain finite.
        assert!(new_min_cut.is_finite());
    }

    #[test]
    fn test_level_info() {
        let graph = create_simple_graph();
        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        let levels = decomp.level_info();

        // A triangle yields levels 0 (leaves) plus internal levels.
        assert!(!levels.is_empty());

        // Levels must come back sorted ascending.
        for i in 1..levels.len() {
            assert!(levels[i].level > levels[i - 1].level);
        }

        // Per-level counts must account for every node.
        let total_nodes: usize = levels.iter().map(|l| l.num_nodes).sum();
        assert_eq!(total_nodes, decomp.num_nodes());
    }

    #[test]
    fn test_balanced_tree() {
        let graph = Arc::new(DynamicGraph::new());

        // 15 vertices should give height ~4.
        for i in 1..=15 {
            graph.add_vertex(i);
        }

        // Chain edges keep the graph connected.
        for i in 1..15 {
            graph.insert_edge(i, i + 1, 1.0).unwrap();
        }

        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Height should be O(log n) = O(log 15) ≈ 4
        assert!(decomp.height() <= 4, "Height {} should be <= 4", decomp.height());

        // Balanced construction keeps every leaf at level 0.
        let leaf_count = decomp.nodes.iter().filter(|n| n.level == 0).count();
        assert_eq!(leaf_count, 15);
    }

    #[test]
    fn test_compute_cut() {
        let graph = Arc::new(DynamicGraph::new());

        // Simple 2-2 bipartite graph:
        // Left: 1, 2 — Right: 3, 4 — Edges: 1-3 (weight 1), 2-4 (weight 1)
        graph.insert_edge(1, 3, 1.0).unwrap();
        graph.insert_edge(2, 4, 1.0).unwrap();

        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Min cut depends on the tree partitioning:
        // 0.0 for {1,3} vs {2,4} (no crossing edges),
        // 1.0 for {1} vs {2,3,4} or similar,
        // 2.0 for {1,2} vs {3,4}.
        let min_cut = decomp.min_cut_value();
        assert!(min_cut.is_finite() && min_cut <= 2.0);
    }

    #[test]
    fn test_large_tree() {
        let graph = Arc::new(DynamicGraph::new());

        // Path graph with 100 vertices.
        for i in 1..=100 {
            graph.add_vertex(i);
        }

        for i in 1..100 {
            graph.insert_edge(i, i + 1, 1.0).unwrap();
        }

        let decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Height should be O(log n) = O(log 100) ≈ 7
        assert!(decomp.height() <= 7, "Height {} should be <= 7", decomp.height());

        // Min cut of a path is 1.0 (any single edge).
        assert_eq!(decomp.min_cut_value(), 1.0);

        // Total nodes: 100 leaves + 99 internal = 199
        assert_eq!(decomp.num_nodes(), 199);
    }

    #[test]
    fn test_propagate_updates() {
        let graph = create_simple_graph();
        let mut decomp = HierarchicalDecomposition::build(graph).unwrap();

        // Force every node to be recomputed.
        for i in 0..decomp.nodes.len() {
            decomp.nodes[i].dirty = true;
        }

        let min_cut = decomp.propagate_updates();

        // All nodes should now be clean.
        for node in &decomp.nodes {
            assert!(!node.dirty);
        }

        // Min cut of the unit triangle is 2.0.
        assert_eq!(min_cut, 2.0);
    }
}