pub mod logistic;
pub mod pcr;
pub mod regression;
pub mod scalar_on_shape;
#[cfg(test)]
mod tests;
pub use logistic::{
elastic_logistic, elastic_logistic_with_config, predict_elastic_logistic, ElasticLogisticResult,
};
pub use pcr::{elastic_pcr, elastic_pcr_with_config, ElasticPcrResult};
pub use regression::{
elastic_regression, elastic_regression_with_config, predict_elastic_regression,
ElasticRegressionResult,
};
pub use scalar_on_shape::{predict_scalar_on_shape, scalar_on_shape, ScalarOnShapeResult};
use crate::alignment::reparameterize_curve;
use crate::matrix::FdMatrix;
/// Configuration for elastic regression / elastic logistic regression
/// (consumed by `elastic_regression_with_config` and
/// `elastic_logistic_with_config`).
#[derive(Debug, Clone, PartialEq)]
pub struct ElasticConfig {
    /// Number of components used to represent the coefficient function
    /// beta (default 10).
    pub ncomp_beta: usize,
    /// Regularization weight; 0.0 disables the penalty (default 0.0).
    pub lambda: f64,
    /// Maximum number of fitting iterations (default 20).
    pub max_iter: usize,
    /// Relative convergence tolerance on beta — compare `beta_converged`
    /// in this module (default 1e-4).
    pub tol: f64,
}
impl Default for ElasticConfig {
fn default() -> Self {
Self {
ncomp_beta: 10,
lambda: 0.0,
max_iter: 20,
tol: 1e-4,
}
}
}
/// Configuration for elastic principal-component regression
/// (consumed by `elastic_pcr_with_config`).
#[derive(Debug, Clone, PartialEq)]
pub struct ElasticPcrConfig {
    /// Number of principal components retained (default 3).
    pub ncomp: usize,
    /// Which functional PCA variant to run (default `PcaMethod::Vertical`).
    pub pca_method: PcaMethod,
    /// Regularization weight; 0.0 disables the penalty (default 0.0).
    pub lambda: f64,
    /// Maximum number of fitting iterations (default 20).
    pub max_iter: usize,
    /// Convergence tolerance (default 1e-4).
    pub tol: f64,
}
impl Default for ElasticPcrConfig {
fn default() -> Self {
Self {
ncomp: 3,
pca_method: PcaMethod::Vertical,
lambda: 0.0,
max_iter: 20,
tol: 1e-4,
}
}
}
/// Configuration for scalar-on-shape regression
/// (consumed by `scalar_on_shape` / `predict_scalar_on_shape`).
#[derive(Debug, Clone, PartialEq)]
pub struct ScalarOnShapeConfig {
    /// Number of basis functions (default 11).
    pub nbasis: usize,
    /// Roughness-penalty weight (default 1e-3).
    pub lambda: f64,
    /// Order of the differential operator in the roughness penalty —
    /// assumption from the name `lfd`; confirm against `scalar_on_shape`
    /// (default 2).
    pub lfd_order: usize,
    /// How the index transform is modeled (default `IndexMethod::Identity`).
    pub index_method: IndexMethod,
    /// Degree for the link function g — presumably; verify in the
    /// `scalar_on_shape` module (default 2).
    pub g_degree: usize,
    /// Maximum outer-loop iterations (default 10).
    pub max_iter_outer: usize,
    /// Maximum inner-loop iterations (default 15).
    pub max_iter_inner: usize,
    /// Convergence tolerance (default 1e-4).
    pub tol: f64,
    /// Penalty weight for the dynamic-programming alignment step —
    /// NOTE(review): inferred from the `dp_` prefix, confirm; 0.0
    /// disables it (default 0.0).
    pub dp_lambda: f64,
}
impl Default for ScalarOnShapeConfig {
fn default() -> Self {
Self {
nbasis: 11,
lambda: 1e-3,
lfd_order: 2,
index_method: IndexMethod::Identity,
g_degree: 2,
max_iter_outer: 10,
max_iter_inner: 15,
tol: 1e-4,
dp_lambda: 0.0,
}
}
}
/// Functional-PCA variant used by elastic PCR
/// (see `ElasticPcrConfig::pca_method`).
///
/// `#[non_exhaustive]`: further variants may be added; downstream
/// matches need a wildcard arm.
#[derive(Debug, Clone, Copy, PartialEq)]
#[non_exhaustive]
pub enum PcaMethod {
    /// Vertical (amplitude) fPCA — TODO confirm naming against the `pcr` module.
    Vertical,
    /// Horizontal (phase/warping) fPCA — TODO confirm.
    Horizontal,
    /// Joint amplitude-phase fPCA — TODO confirm.
    Joint,
}
/// Model for the index transform in scalar-on-shape regression
/// (see `ScalarOnShapeConfig::index_method`).
///
/// `#[non_exhaustive]`: further variants may be added; downstream
/// matches need a wildcard arm.
#[derive(Debug, Clone, PartialEq)]
#[non_exhaustive]
pub enum IndexMethod {
    /// No transform applied.
    Identity,
    /// Polynomial transform; payload is presumably the degree — TODO confirm
    /// against the `scalar_on_shape` module.
    Polynomial(usize),
    /// Nadaraya–Watson kernel smoother; payload is presumably the
    /// bandwidth — TODO confirm.
    NadarayaWatson(f64),
}
/// Warp each SRSF row of `q_all` by the matching row of `gammas`.
///
/// For every curve i the result row is `(q_i ∘ gamma_i) * sqrt(gamma_i')`,
/// where the derivative gamma_i' is taken by uniform finite differences on
/// the grid spacing of `argvals` (assumes an evenly spaced grid — the step
/// is derived from the endpoints only). Negative derivative values are
/// clamped to 0 before the square root.
pub(super) fn apply_warps_to_srsfs(
    q_all: &FdMatrix,
    gammas: &FdMatrix,
    argvals: &[f64],
) -> FdMatrix {
    let (n_curves, n_pts) = q_all.shape();
    // Uniform spacing inferred from the grid endpoints.
    let step = (argvals[n_pts - 1] - argvals[0]) / (n_pts - 1) as f64;
    let mut warped_all = FdMatrix::zeros(n_curves, n_pts);
    for row in 0..n_curves {
        let q_row: Vec<f64> = (0..n_pts).map(|col| q_all[(row, col)]).collect();
        let gamma_row: Vec<f64> = (0..n_pts).map(|col| gammas[(row, col)]).collect();
        let q_warped = reparameterize_curve(&q_row, argvals, &gamma_row);
        let dgamma = crate::helpers::gradient_uniform(&gamma_row, step);
        for (col, (&qw, &dg)) in q_warped.iter().zip(dgamma.iter()).enumerate() {
            // Guard against a negative finite-difference derivative
            // before taking the square root.
            warped_all[(row, col)] = qw * dg.max(0.0).sqrt();
        }
    }
    warped_all
}
/// Build an n-row matrix of identity warping functions: every row is a
/// copy of `argvals`, i.e. gamma(t) = t on the observation grid.
pub(super) fn init_identity_warps(n: usize, argvals: &[f64]) -> FdMatrix {
    let mut gammas = FdMatrix::zeros(n, argvals.len());
    for row in 0..n {
        for (col, &t) in argvals.iter().enumerate() {
            gammas[(row, col)] = t;
        }
    }
    gammas
}
/// Fitted values of the SRSF regression model: for each row i of
/// `q_aligned`, returns `alpha + sum_j q_aligned[i][j] * beta[j] * weights[j]`
/// (a weighted inner product of the aligned SRSF with the coefficient
/// function, plus the intercept).
pub(super) fn srsf_fitted_values(
    q_aligned: &FdMatrix,
    beta: &[f64],
    weights: &[f64],
    alpha: f64,
) -> Vec<f64> {
    let (n_obs, n_pts) = q_aligned.shape();
    (0..n_obs)
        .map(|i| {
            let inner: f64 = (0..n_pts)
                .map(|j| q_aligned[(i, j)] * beta[j] * weights[j])
                .sum();
            alpha + inner
        })
        .collect()
}
/// Relative-change convergence test for the coefficient vector:
/// `||beta_new - beta_old|| / max(||beta_old||, 1e-10) < tol`.
///
/// The difference is taken over `zip`, so if the slices have different
/// lengths the extra elements of the longer one are ignored; the
/// denominator always uses all of `beta_old`. The 1e-10 floor avoids
/// division by zero when `beta_old` is all zeros.
pub(super) fn beta_converged(beta_new: &[f64], beta_old: &[f64], tol: f64) -> bool {
    let mut diff_sq = 0.0_f64;
    for (&a, &b) in beta_new.iter().zip(beta_old.iter()) {
        let d = a - b;
        diff_sq += d * d;
    }
    let mut norm_sq = 0.0_f64;
    for &b in beta_old {
        norm_sq += b * b;
    }
    diff_sq.sqrt() / norm_sq.sqrt().max(1e-10) < tol
}