fdars_core/classification/
mod.rs1use crate::error::FdarError;
14use crate::matrix::FdMatrix;
15use crate::regression::fdata_to_pc_1d;
16
17pub mod cv;
18pub mod dd;
19pub mod fit;
20pub mod kernel;
21pub mod knn;
22pub mod lda;
23pub mod qda;
24
25#[cfg(test)]
26mod tests;
27
/// Result of evaluating a functional classifier on a data set.
#[derive(Debug, Clone, PartialEq)]
pub struct ClassifResult {
    /// Predicted class label per observation, remapped to `0..n_classes`
    /// (see `remap_labels` in this module).
    pub predicted: Vec<usize>,
    /// Posterior class probabilities when the method provides them;
    /// presumably one row per observation, one column per class — confirm in `fit`.
    pub probabilities: Option<FdMatrix>,
    /// Fraction of observations whose prediction matches the true label.
    pub accuracy: f64,
    /// Confusion matrix: `confusion[t][p]` counts observations of true class `t`
    /// predicted as class `p`.
    pub confusion: Vec<Vec<usize>>,
    /// Number of distinct classes found in the training labels.
    pub n_classes: usize,
    /// Number of principal components used when building the feature matrix.
    pub ncomp: usize,
}
48
/// Summary of a cross-validation run for classifier model selection.
#[derive(Debug, Clone, PartialEq)]
pub struct ClassifCvResult {
    /// Cross-validated misclassification rate (presumably at `best_ncomp` —
    /// confirm in `cv`).
    pub error_rate: f64,
    /// Misclassification rate observed in each fold.
    pub fold_errors: Vec<f64>,
    /// Component count selected by cross-validation.
    pub best_ncomp: usize,
}
59
/// Remap arbitrary class labels in `y` onto the contiguous range `0..g`,
/// where `g` is the number of distinct labels.
///
/// Indices are assigned in ascending order of the original label values, so
/// the smallest original label becomes 0. Returns the remapped labels and `g`.
pub(crate) fn remap_labels(y: &[usize]) -> (Vec<usize>, usize) {
    let mut unique: Vec<usize> = y.to_vec();
    unique.sort_unstable();
    unique.dedup();
    let g = unique.len();
    // `unique` is sorted and deduplicated, so binary search always succeeds
    // and yields the rank of the label: O(n log g) instead of the previous
    // O(n * g) linear `position` scan per element.
    let labels: Vec<usize> = y
        .iter()
        .map(|label| {
            unique
                .binary_search(label)
                .expect("every label of y is in its own unique set")
        })
        .collect();
    (labels, g)
}
76
/// Build a g×g confusion matrix where rows index true labels and columns
/// index predicted labels. Pairs with an out-of-range label are skipped.
fn confusion_matrix(true_labels: &[usize], pred_labels: &[usize], g: usize) -> Vec<Vec<usize>> {
    let mut matrix = vec![vec![0usize; g]; g];
    true_labels
        .iter()
        .zip(pred_labels)
        .filter(|&(&t, &p)| t < g && p < g)
        .for_each(|(&t, &p)| matrix[t][p] += 1);
    matrix
}
87
88pub(crate) fn class_means_and_priors(
90 features: &FdMatrix,
91 labels: &[usize],
92 g: usize,
93) -> (Vec<Vec<f64>>, Vec<usize>, Vec<f64>) {
94 let n = features.nrows();
95 let d = features.ncols();
96 let mut counts = vec![0usize; g];
97 let mut class_means = vec![vec![0.0; d]; g];
98 for i in 0..n {
99 let c = labels[i];
100 counts[c] += 1;
101 for j in 0..d {
102 class_means[c][j] += features[(i, j)];
103 }
104 }
105 for c in 0..g {
106 if counts[c] > 0 {
107 for j in 0..d {
108 class_means[c][j] /= counts[c] as f64;
109 }
110 }
111 }
112 let priors: Vec<f64> = counts.iter().map(|&c| c as f64 / n as f64).collect();
113 (class_means, counts, priors)
114}
115
/// Fraction of positions where `true_labels` and `pred_labels` agree.
///
/// Returns 0.0 for empty input to avoid a 0/0 NaN; comparison is pairwise,
/// so any surplus elements in a longer slice are ignored by the zip.
fn compute_accuracy(true_labels: &[usize], pred_labels: &[usize]) -> f64 {
    let n = true_labels.len();
    if n == 0 {
        return 0.0;
    }
    let mut correct = 0usize;
    for (t, p) in true_labels.iter().zip(pred_labels) {
        if t == p {
            correct += 1;
        }
    }
    correct as f64 / n as f64
}
129
130pub(crate) fn build_feature_matrix(
132 data: &FdMatrix,
133 scalar_covariates: Option<&FdMatrix>,
134 ncomp: usize,
135) -> Result<(FdMatrix, Vec<f64>, FdMatrix), FdarError> {
136 let fpca = fdata_to_pc_1d(data, ncomp)?;
137 let n = data.nrows();
138 let d_pc = fpca.scores.ncols();
139 let d_cov = scalar_covariates.map_or(0, super::matrix::FdMatrix::ncols);
140 let d = d_pc + d_cov;
141
142 let mut features = FdMatrix::zeros(n, d);
143 for i in 0..n {
144 for j in 0..d_pc {
145 features[(i, j)] = fpca.scores[(i, j)];
146 }
147 if let Some(cov) = scalar_covariates {
148 for j in 0..d_cov {
149 features[(i, d_pc + j)] = cov[(i, j)];
150 }
151 }
152 }
153
154 Ok((features, fpca.mean, fpca.rotation))
155}
156
157pub use cv::fclassif_cv;
162pub use dd::fclassif_dd;
163pub(crate) use fit::classif_predict_probs;
164pub use fit::{
165 fclassif_cv_with_config, fclassif_knn_fit, fclassif_lda_fit, fclassif_qda_fit, ClassifCvConfig,
166 ClassifFit, ClassifMethod,
167};
168pub use kernel::fclassif_kernel;
169pub use knn::fclassif_knn;
170pub use lda::fclassif_lda;
171pub use qda::fclassif_qda;