// ferrolearn_decomp — crate root (lib.rs)
//! # ferrolearn-decomp
//!
//! Dimensionality reduction and matrix decomposition for the ferrolearn
//! machine learning framework.
//!
//! This crate provides PCA, TruncatedSVD, NMF, Kernel PCA, and manifold
//! learning methods that follow the ferrolearn `Fit`/`Transform` trait
//! pattern.
//!
//! ## Algorithms
//!
//! - [`PCA`] — Principal Component Analysis. Centres data and projects onto
//!   the directions of maximum variance.
//! - [`TruncatedSVD`] — Truncated Singular Value Decomposition using the
//!   randomized algorithm. Does **not** centre data, making it suitable for
//!   sparse inputs.
//! - [`NMF`] — Non-negative Matrix Factorization. Decomposes a non-negative
//!   matrix `X` into `W * H` where both factors are non-negative.
//! - [`KernelPCA`] — Kernel PCA. Non-linear dimensionality reduction via
//!   a kernel-induced feature space.
//! - [`MDS`] — Classical Multidimensional Scaling. Embeds data preserving
//!   pairwise distances.
//! - [`Isomap`] — Isometric Mapping. Non-linear dimensionality reduction
//!   via geodesic distances on a kNN graph.
//! - [`SpectralEmbedding`] — Laplacian Eigenmaps. Non-linear dimensionality
//!   reduction via the normalised graph Laplacian.
//! - [`LLE`] — Locally Linear Embedding. Non-linear dimensionality reduction
//!   preserving local reconstruction weights.
//! - [`Tsne`] — t-distributed Stochastic Neighbor Embedding. Non-linear
//!   dimensionality reduction using Barnes-Hut approximation.
//! - [`Umap`] — Uniform Manifold Approximation and Projection. Fast non-linear
//!   dimensionality reduction based on topological data analysis.
//! - [`LatentDirichletAllocation`] — Latent Dirichlet Allocation topic model.
//!   Discovers latent topics in document-term matrices.
//! - [`DictionaryLearning`] — Sparse coding with a learned dictionary.
//!
//! ## Pipeline Integration
//!
//! `PCA<f64>`, `TruncatedSVD<f64>`, `NMF<f64>`, and `KernelPCA<f64>` all
//! implement
//! [`PipelineTransformer`](ferrolearn_core::pipeline::PipelineTransformer)
//! so they can be used as transformer steps in a
//! [`Pipeline`](ferrolearn_core::pipeline::Pipeline).
//!
//! # Examples
//!
//! ```
//! use ferrolearn_decomp::PCA;
//! use ferrolearn_core::traits::{Fit, Transform};
//! use ndarray::array;
//!
//! let pca = PCA::<f64>::new(1);
//! let x = array![[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]];
//! let fitted = pca.fit(&x, &()).unwrap();
//! let projected = fitted.transform(&x).unwrap();
//! assert_eq!(projected.ncols(), 1);
//! ```

59pub mod covariance;
60pub mod cross_decomposition;
61pub mod dictionary_learning;
62pub mod factor_analysis;
63pub mod fast_ica;
64pub mod incremental_pca;
65pub mod isomap;
66pub mod kernel_pca;
67pub mod lda_topic;
68pub mod lle;
69pub mod mds;
70pub mod nmf;
71pub mod pca;
72pub mod spectral_embedding;
73pub mod truncated_svd;
74pub mod tsne;
75pub mod umap;
76
77// Re-exports
78pub use covariance::{
79    EllipticEnvelope, EmpiricalCovariance, FittedCovariance, FittedEllipticEnvelope,
80    FittedLedoitWolf, FittedMinCovDet, FittedOAS, LedoitWolf, MinCovDet, ShrunkCovariance, OAS,
81};
82pub use cross_decomposition::{
83    CCA, FittedCCA, FittedPLSCanonical, FittedPLSRegression, FittedPLSSVD, PLSCanonical,
84    PLSRegression, PLSSVD,
85};
86pub use dictionary_learning::{
87    DictFitAlgorithm, DictTransformAlgorithm, DictionaryLearning, FittedDictionaryLearning,
88};
89pub use factor_analysis::{FactorAnalysis, FittedFactorAnalysis};
90pub use fast_ica::{Algorithm, FastICA, FittedFastICA, NonLinearity};
91pub use incremental_pca::{FittedIncrementalPCA, IncrementalPCA};
92pub use isomap::{FittedIsomap, Isomap};
93pub use kernel_pca::{FittedKernelPCA, Kernel, KernelPCA};
94pub use lda_topic::{
95    FittedLatentDirichletAllocation, LatentDirichletAllocation, LdaLearningMethod,
96};
97pub use lle::{FittedLLE, LLE};
98pub use mds::{Dissimilarity, FittedMDS, MDS};
99pub use nmf::{FittedNMF, NMF, NMFInit, NMFSolver};
100pub use pca::{FittedPCA, PCA};
101pub use spectral_embedding::{Affinity, FittedSpectralEmbedding, SpectralEmbedding};
102pub use truncated_svd::{FittedTruncatedSVD, TruncatedSVD};
103pub use tsne::{FittedTsne, Tsne};
104pub use umap::{FittedUmap, Umap, UmapMetric};