//! sklears_mixture/common.rs — shared utilities for mixture models.

use scirs2_core::ndarray::{Array1, Array2, ArrayView1, ArrayView2};
use scirs2_core::random::{thread_rng, RandNormal};
use sklears_core::error::{Result as SklResult, SklearsError};
use std::f64::consts::PI;
12pub fn sample_multivariate_normal(
15 mean: &ArrayView1<f64>,
16 cov: &ArrayView2<f64>,
17) -> SklResult<Array1<f64>> {
18 let n_features = mean.len();
19 let mut sample = Array1::zeros(n_features);
20 let mut rng = thread_rng();
21
22 for i in 0..n_features {
24 let normal = RandNormal::new(0.0, 1.0)
25 .map_err(|e| SklearsError::InvalidInput(format!("Normal distribution error: {}", e)))?;
26 sample[i] = rng.sample(normal);
27 }
28
29 for i in 0..n_features {
32 let variance = cov[[i, i]].abs();
33 sample[i] = mean[i] + sample[i] * variance.sqrt();
34 }
35
36 Ok(sample)
37}
38
39pub fn gaussian_log_pdf(
41 x: &ArrayView1<f64>,
42 mean: &ArrayView1<f64>,
43 _cov: &ArrayView2<f64>,
44) -> SklResult<f64> {
45 let n_features = x.len();
46 let diff = x - mean;
47
48 let det: f64 = 1.0; let inv_quad_form = diff.dot(&diff); let log_norm = -0.5 * (n_features as f64 * (2.0 * PI).ln() + det.ln());
53 let log_exp = -0.5 * inv_quad_form;
54
55 Ok(log_norm + log_exp)
56}
57
58pub fn gaussian_log_pdf_diagonal(
60 x: &ArrayView1<f64>,
61 mean: &ArrayView1<f64>,
62 diag_cov: &ArrayView1<f64>,
63) -> SklResult<f64> {
64 let n_features = x.len();
65 let diff = x - mean;
66
67 let log_det = diag_cov.mapv(|v| v.ln()).sum();
68 let inv_quad_form = (&diff * &diff / diag_cov).sum();
69
70 let log_norm = -0.5 * (n_features as f64 * (2.0 * PI).ln() + log_det);
71 let log_exp = -0.5 * inv_quad_form;
72
73 Ok(log_norm + log_exp)
74}
75
76pub fn gaussian_log_pdf_spherical(
78 x: &ArrayView1<f64>,
79 mean: &ArrayView1<f64>,
80 variance: f64,
81) -> SklResult<f64> {
82 let n_features = x.len();
83 let diff = x - mean;
84
85 let squared_dist = diff.dot(&diff);
86
87 let log_norm = -0.5 * (n_features as f64 * (2.0 * PI * variance).ln());
88 let log_exp = -0.5 * squared_dist / variance;
89
90 Ok(log_norm + log_exp)
91}
92
/// Covariance parameterization for Gaussian mixture components.
///
/// The per-type parameter counts used for model selection are defined in
/// `ModelSelection::n_parameters`, and the corresponding storage layouts
/// in `CovarianceMatrices`.
#[derive(Debug, Clone, PartialEq)]
pub enum CovarianceType {
    /// Each component has its own full covariance matrix.
    Full,
    /// Each component has its own diagonal covariance.
    Diagonal,
    /// All components share one full covariance matrix.
    Tied,
    /// Each component has a single scalar (isotropic) variance.
    Spherical,
}
105
/// Fitted covariance storage, one layout per covariance type.
#[derive(Debug, Clone)]
pub enum CovarianceMatrices {
    /// One full matrix per component.
    Full(Vec<Array2<f64>>),
    /// Diagonal variances for all components in one 2-D array —
    /// presumably one row per component; TODO confirm against callers.
    Diagonal(Array2<f64>),
    /// A single covariance matrix shared by every component.
    Tied(Array2<f64>),
    /// One scalar variance per component.
    Spherical(Array1<f64>),
}
118
119impl CovarianceType {
120 pub fn parse_type(s: &str) -> Result<Self, SklearsError> {
122 match s.to_lowercase().as_str() {
123 "full" => Ok(CovarianceType::Full),
124 "diag" | "diagonal" => Ok(CovarianceType::Diagonal),
125 "tied" => Ok(CovarianceType::Tied),
126 "spherical" => Ok(CovarianceType::Spherical),
127 _ => Err(SklearsError::InvalidInput(format!(
128 "Unknown covariance type: {}. Must be one of: 'full', 'diagonal', 'tied', 'spherical'",
129 s
130 ))),
131 }
132 }
133}
134
/// Strategy for initializing mixture-model parameters.
///
/// NOTE(review): semantics inferred from variant names — confirm against
/// the fitting code that consumes this enum.
#[derive(Debug, Clone, PartialEq)]
pub enum InitMethod {
    /// k-means++-style seeding, presumably.
    KMeansPlus,
    /// Random initialization.
    Random,
    /// Use caller-supplied parameters as-is.
    Params,
}
145
/// Model-selection scores for a fitted mixture model.
///
/// See the associated functions on the `impl` for how `aic`/`bic` are
/// computed from the log-likelihood and parameter count.
#[derive(Debug, Clone)]
pub struct ModelSelection {
    /// Akaike information criterion (lower is better).
    pub aic: f64,
    /// Bayesian information criterion (lower is better).
    pub bic: f64,
    /// Total log-likelihood of the data under the model.
    pub log_likelihood: f64,
    /// Number of free parameters in the model.
    pub n_parameters: usize,
}
158
159impl ModelSelection {
160 pub fn bic(log_likelihood: f64, n_params: usize, n_samples: usize) -> f64 {
162 -2.0 * log_likelihood + (n_params as f64) * (n_samples as f64).ln()
163 }
164
165 pub fn aic(log_likelihood: f64, n_params: usize) -> f64 {
167 -2.0 * log_likelihood + 2.0 * (n_params as f64)
168 }
169
170 pub fn n_parameters(
172 n_components: usize,
173 n_features: usize,
174 covariance_type: &CovarianceType,
175 ) -> usize {
176 let weight_params = n_components - 1; let mean_params = n_components * n_features;
178 let covariance_params = match covariance_type {
179 CovarianceType::Full => n_components * n_features * (n_features + 1) / 2,
180 CovarianceType::Diagonal => n_components * n_features,
181 CovarianceType::Tied => n_features * (n_features + 1) / 2,
182 CovarianceType::Spherical => n_components,
183 };
184 weight_params + mean_params + covariance_params
185 }
186}