// polyvoice 0.6.0-alpha.4
//
// Speaker diarization library for Rust — online and offline, ONNX-powered,
// ecosystem-agnostic. See the crate documentation for details.
//! Spectral clustering for speaker diarization.
//!
//! Uses normalized graph Laplacian + k-means on eigenvectors.
//! Auto-selects k via eigengap heuristic.

use crate::utils::cosine_similarity;
use faer::Side;
use faer::prelude::*;

/// Run spectral clustering over speaker embeddings with automatic selection
/// of the number of clusters.
///
/// Pipeline:
/// 1. Build a symmetric k-NN affinity matrix from pairwise cosine similarity.
/// 2. Form the symmetric normalized Laplacian `L = I - D^{-1/2} A D^{-1/2}`.
/// 3. Eigendecompose `L` and embed each point in the eigenvectors with the
///    smallest eigenvalues (row-normalized).
/// 4. Choose `k` by minimizing BIC over k-means runs for `k = 2..=min(max_k, 10)`;
///    an eigengap estimate seeds the choice and acts as the fallback when that
///    search range is empty.
///
/// Precondition: all `embeddings` share the same dimension (as required by
/// `cosine_similarity`). `max_k` caps the number of clusters considered
/// (further capped at 20 internally, and at 10 for the BIC search).
///
/// Returns one 0-based cluster label per embedding, in input order.
/// Degenerate cases: empty input -> empty vec; single input -> `[0]`;
/// failed eigendecomposition -> all points assigned to cluster 0.
pub fn spectral_cluster(embeddings: &[Vec<f32>], max_k: usize) -> Vec<usize> {
    let n = embeddings.len();
    if n == 0 {
        return Vec::new();
    }
    if n == 1 {
        return vec![0];
    }

    // Build a k-NN affinity matrix using cosine similarity. Each node keeps
    // its `k_nn` strongest (positive) neighbors; edges are mirrored so the
    // matrix stays symmetric.
    let k_nn = (n / 10).clamp(2, 10);
    let mut aff = vec![0.0f64; n * n];
    for i in 0..n {
        aff[i * n + i] = 1.0; // self-affinity
        let mut neighbors: Vec<(f64, usize)> = Vec::with_capacity(n);
        for j in 0..n {
            if i != j {
                let sim = cosine_similarity(&embeddings[i], &embeddings[j]) as f64;
                neighbors.push((sim, j));
            }
        }
        neighbors.sort_by(|a, b| b.0.total_cmp(&a.0));
        for &(sim, j) in neighbors.iter().take(k_nn) {
            if sim > 0.0 {
                aff[i * n + j] = sim;
                aff[j * n + i] = sim;
            }
        }
    }

    // Degree of each node: row sums of the affinity matrix.
    let deg: Vec<f64> = (0..n).map(|i| aff[i * n..i * n + n].iter().sum()).collect();

    // Normalized Laplacian: L = I - D^{-1/2} A D^{-1/2}.
    // `.max(1e-10)` guards against division by zero for isolated nodes.
    let mut lap = Mat::zeros(n, n);
    for i in 0..n {
        for j in 0..n {
            let val = if i == j {
                1.0 - aff[i * n + j] / deg[i].max(1e-10)
            } else {
                -aff[i * n + j] / (deg[i].sqrt() * deg[j].sqrt()).max(1e-10)
            };
            lap[(i, j)] = val;
        }
    }

    // Eigendecomposition of the symmetric Laplacian. All eigenpairs are
    // computed (n is small for diarization, usually < 500). On failure,
    // fall back to a single cluster.
    let eig = match lap.self_adjoint_eigen(Side::Lower) {
        Ok(e) => e,
        Err(_) => return vec![0; n],
    };
    let s = eig.S();
    let u = eig.U();

    // Sort eigenvalues ascending, remembering the original column indices.
    let mut eig_pairs: Vec<(f64, usize)> = (0..n).map(|i| (s[i], i)).collect();
    eig_pairs.sort_by(|a, b| a.0.total_cmp(&b.0));

    // Eigengap heuristic: the candidate k is where the relative jump between
    // consecutive sorted eigenvalues is largest.
    // NOTE(review): the BIC loop below runs whenever max_k >= 2 and always
    // overwrites `best_k` on its first iteration, so this estimate only
    // decides the result when that loop's range is empty — presumably
    // intentional ("validate with BIC"); confirm.
    let max_k = max_k.min(n).min(20);
    let mut eigengap_k = 1usize;
    let mut best_gap = 0.0f64;

    for k in 1..max_k.saturating_sub(1) {
        let prev = eig_pairs[k].0;
        let gap = if prev > 1e-10 {
            (eig_pairs[k + 1].0 - prev) / prev
        } else {
            0.0
        };
        if gap > best_gap {
            best_gap = gap;
            eigengap_k = k;
        }
    }

    // Spectral embedding for a given dimensionality: each point's coordinates
    // in the `dim` smallest eigenvectors, with each row normalized to unit
    // length. Shared by the BIC search and the final run (previously this
    // code was duplicated verbatim).
    let spectral_features = |dim: usize| -> Vec<Vec<f64>> {
        let mut features = vec![vec![0.0f64; dim]; n];
        for (i, feat) in features.iter_mut().enumerate() {
            for (col, &(_, idx)) in eig_pairs.iter().take(dim).enumerate() {
                feat[col] = u[(i, idx)];
            }
        }
        for feat in features.iter_mut() {
            let norm: f64 = feat.iter().map(|v| v * v).sum::<f64>().sqrt();
            if norm > 1e-10 {
                for v in feat.iter_mut() {
                    *v /= norm;
                }
            }
        }
        features
    };

    // Try a range of k values and keep the one with the lowest BIC.
    let mut best_k = eigengap_k.max(2).min(max_k);
    let mut best_bic = f64::INFINITY;

    for k in 2..=max_k.min(10) {
        let features = spectral_features(k);
        let labels = kmeans_on_features(&features, k, 20);
        let bic = compute_bic(&features, &labels, k);
        if bic < best_bic {
            best_bic = bic;
            best_k = k;
        }
    }

    // Final clustering with the selected k.
    kmeans_on_features(&spectral_features(best_k), best_k, 20)
}

/// Lloyd's k-means over pre-computed feature vectors, with k-means++
/// initialization.
///
/// All rows of `features` must share the same dimension. `k` is clamped to
/// `1..=features.len()`; `max_iter` bounds the assign/update iterations.
///
/// Returns one cluster index per feature row (empty input -> empty vec).
fn kmeans_on_features(features: &[Vec<f64>], k: usize, max_iter: usize) -> Vec<usize> {
    let n = features.len();
    // Guard before touching features[0]: without it an empty slice panics.
    if n == 0 {
        return Vec::new();
    }
    let k = k.min(n).max(1);
    let dim = features[0].len();

    if k == 1 {
        return vec![0; n];
    }

    // K-means++ initialization: the first centroid is uniform-random; each
    // subsequent one is sampled with probability proportional to its distance
    // to the nearest centroid chosen so far.
    let mut centroids: Vec<Vec<f64>> = Vec::with_capacity(k);
    let mut rng = fastrand::Rng::new();
    let first_idx = rng.usize(0..n);
    centroids.push(features[first_idx].clone());

    let mut dists = vec![f64::INFINITY; n];
    for _ in 1..k {
        // Refresh each point's nearest-centroid distance by checking only the
        // most recently added centroid.
        for (i, feat) in features.iter().enumerate() {
            let d = euclidean_distance(feat, &centroids[centroids.len() - 1]);
            if d < dists[i] {
                dists[i] = d;
            }
        }
        let total: f64 = dists.iter().sum();
        if total <= 0.0 {
            // Every point coincides with an existing centroid; adding more
            // would only duplicate them.
            break;
        }
        // Weighted sampling via inverse-CDF over the distance mass.
        let target = rng.f64() * total;
        let mut cumsum = 0.0;
        let mut chosen = 0;
        for (i, &d) in dists.iter().enumerate() {
            cumsum += d;
            if cumsum >= target {
                chosen = i;
                break;
            }
        }
        centroids.push(features[chosen].clone());
    }

    // The init loop may break early, so re-derive the effective k.
    let k = centroids.len();
    let mut labels = vec![0usize; n];

    for _ in 0..max_iter {
        let mut changed = false;
        // Assignment step: each point joins its nearest centroid.
        for (i, feat) in features.iter().enumerate() {
            let mut best = 0usize;
            let mut best_dist = f64::INFINITY;
            for (c_idx, c) in centroids.iter().enumerate() {
                let dist = euclidean_distance(feat, c);
                if dist < best_dist {
                    best_dist = dist;
                    best = c_idx;
                }
            }
            if labels[i] != best {
                labels[i] = best;
                changed = true;
            }
        }
        if !changed {
            // Converged: no assignment changed this iteration.
            break;
        }
        // Update step: move each centroid to the mean of its members.
        // A cluster that lost all members keeps a zero centroid.
        let mut new_centroids = vec![vec![0.0; dim]; k];
        let mut counts = vec![0usize; k];
        for (i, feat) in features.iter().enumerate() {
            let c = labels[i];
            for (new_centroid, &v) in new_centroids[c].iter_mut().zip(feat.iter()) {
                *new_centroid += v;
            }
            counts[c] += 1;
        }
        for (c, new_centroid) in new_centroids.iter_mut().enumerate().take(k) {
            if counts[c] > 0 {
                for v in new_centroid.iter_mut().take(dim) {
                    *v /= counts[c] as f64;
                }
            }
        }
        centroids = new_centroids;
    }

    labels
}

/// Euclidean (L2) distance between two equal-length vectors.
/// If the lengths differ, the longer slice's tail is ignored (zip semantics).
fn euclidean_distance(a: &[f64], b: &[f64]) -> f64 {
    let mut sum_sq = 0.0f64;
    for (x, y) in a.iter().zip(b.iter()) {
        let diff = x - y;
        sum_sq += diff * diff;
    }
    sum_sq.sqrt()
}

/// Bayesian Information Criterion for a k-means clustering, modeling each
/// cluster as a spherical Gaussian: `BIC = -2*ln(L) + p*ln(n)` with
/// `p = k * (dim + 1)` free parameters (one centroid plus one variance per
/// cluster). Lower is better.
///
/// `labels[i]` must be a valid cluster index `< k` for every feature row.
/// Returns `f64::INFINITY` for empty input; for a perfect fit (near-zero
/// inertia) only the complexity penalty `p*ln(n)` is returned.
fn compute_bic(features: &[Vec<f64>], labels: &[usize], k: usize) -> f64 {
    let n = features.len();
    if n == 0 {
        return f64::INFINITY;
    }
    let dim = features[0].len();

    // Cluster centroids: mean of each cluster's member vectors.
    let mut centroids = vec![vec![0.0f64; dim]; k];
    let mut counts = vec![0usize; k];
    for (i, feat) in features.iter().enumerate() {
        let c = labels[i];
        for (d, &v) in feat.iter().enumerate() {
            centroids[c][d] += v;
        }
        counts[c] += 1;
    }
    for (c, centroid) in centroids.iter_mut().enumerate().take(k) {
        if counts[c] > 0 {
            for v in centroid.iter_mut().take(dim) {
                *v /= counts[c] as f64;
            }
        }
    }

    // Inertia: total squared distance of each point to its centroid.
    // Sum squared differences directly rather than calling
    // euclidean_distance(...).powi(2) — that computed a sqrt only to undo it.
    let mut inertia = 0.0f64;
    for (i, feat) in features.iter().enumerate() {
        let centroid = &centroids[labels[i]];
        inertia += feat
            .iter()
            .zip(centroid.iter())
            .map(|(x, y)| (x - y) * (x - y))
            .sum::<f64>();
    }

    // BIC for spherical Gaussian: -2*log(L) + p*log(n)
    // where p = k * (dim + 1)  (centroids + 1 variance parameter per cluster)
    let p = k * (dim + 1);
    if inertia < 1e-10 {
        // Perfect fit — only the model-complexity penalty remains.
        return p as f64 * (n as f64).ln();
    }
    // Log-likelihood under a pooled spherical variance sigma^2 = inertia / n,
    // up to additive constants that cancel when comparing k values.
    let log_likelihood = -(n as f64) * (inertia / n as f64).ln() / 2.0;
    -2.0 * log_likelihood + p as f64 * (n as f64).ln()
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_spectral_cluster_basic() {
        // Two well-separated directions in 3D, two samples each.
        let embeddings: Vec<Vec<f32>> = vec![
            vec![1.0, 0.0, 0.0],
            vec![0.9, 0.1, 0.0],
            vec![0.0, 1.0, 0.0],
            vec![0.1, 0.9, 0.0],
        ];
        let labels = spectral_cluster(&embeddings, 10);
        assert_eq!(labels.len(), 4);
        // Exactly two clusters, split along the group boundary.
        let num_clusters = labels.iter().max().map_or(0, |&m| m + 1);
        assert_eq!(num_clusters, 2);
        assert_eq!(labels[0], labels[1]);
        assert_eq!(labels[2], labels[3]);
        assert_ne!(labels[0], labels[2]);
    }

    #[test]
    fn test_spectral_cluster_empty() {
        assert!(spectral_cluster(&[], 10).is_empty());
    }

    #[test]
    fn test_spectral_cluster_single() {
        assert_eq!(spectral_cluster(&[vec![1.0, 0.0]], 10), vec![0]);
    }
}