pub mod accuracy;
pub mod auc;
pub mod cluster_hcv;
pub(crate) mod cluster_helpers;
pub mod distance;
pub mod f1;
pub mod mean_absolute_error;
pub mod mean_squared_error;
pub mod precision;
pub mod r2;
pub mod recall;
use crate::linalg::basic::arrays::{Array1, ArrayView1};
use crate::numbers::basenum::Number;
use crate::numbers::floatnum::FloatNumber;
use crate::numbers::realnum::RealNumber;
use std::marker::PhantomData;
/// Common interface shared by every evaluation metric in this module.
pub trait Metrics<T> {
/// Construct the metric with its default configuration.
fn new() -> Self
where
Self: Sized;
/// Construct the metric with a single numeric parameter
/// (e.g. `beta` for the F-beta score); implementations that
/// take no parameter may ignore it (hence the `_` prefix).
fn new_with(_parameter: f64) -> Self
where
Self: Sized;
/// Compute the score of the predictions `y_pred` against the
/// ground-truth values `y_true`.
fn get_score(&self, y_true: &dyn ArrayView1<T>, y_pred: &dyn ArrayView1<T>) -> f64;
}
/// Zero-sized factory for classification metrics over real-valued label
/// types (see the `impl` bounded on `Number + RealNumber + FloatNumber`).
pub struct ClassificationMetrics<T> {
// Carries no data; ties the type parameter to the factory.
phantom: PhantomData<T>,
}
/// Zero-sized factory for classification metrics over totally-ordered
/// label types (see the `impl` bounded on `Number + Ord`).
pub struct ClassificationMetricsOrd<T> {
// Carries no data; ties the type parameter to the factory.
phantom: PhantomData<T>,
}
/// Zero-sized factory for regression metrics (MSE, MAE, R²).
pub struct RegressionMetrics<T> {
// Carries no data; ties the type parameter to the factory.
phantom: PhantomData<T>,
}
/// Zero-sized factory for clustering metrics (homogeneity/completeness/v-measure).
pub struct ClusterMetrics<T> {
// Carries no data; ties the type parameter to the factory.
phantom: PhantomData<T>,
}
/// Factory methods producing classification metric instances.
impl<T: Number + RealNumber + FloatNumber> ClassificationMetrics<T> {
    /// Build a recall metric.
    pub fn recall() -> recall::Recall<T> {
        recall::Recall::<T>::new()
    }

    /// Build a precision metric.
    pub fn precision() -> precision::Precision<T> {
        precision::Precision::<T>::new()
    }

    /// Build an F-beta score metric parameterized by `beta`.
    pub fn f1(beta: f64) -> f1::F1<T> {
        f1::F1::<T>::new_with(beta)
    }

    /// Build a ROC AUC metric.
    pub fn roc_auc_score() -> auc::AUC<T> {
        auc::AUC::<T>::new()
    }
}
/// Factory methods producing classification metrics for ordered label types.
impl<T: Number + Ord> ClassificationMetricsOrd<T> {
    /// Build an accuracy metric.
    pub fn accuracy() -> accuracy::Accuracy<T> {
        accuracy::Accuracy::<T>::new()
    }
}
/// Factory methods producing regression metric instances.
impl<T: Number + FloatNumber> RegressionMetrics<T> {
    /// Build a mean squared error metric.
    pub fn mean_squared_error() -> mean_squared_error::MeanSquareError<T> {
        mean_squared_error::MeanSquareError::<T>::new()
    }

    /// Build a mean absolute error metric.
    pub fn mean_absolute_error() -> mean_absolute_error::MeanAbsoluteError<T> {
        mean_absolute_error::MeanAbsoluteError::<T>::new()
    }

    /// Build a coefficient-of-determination (R²) metric.
    pub fn r2() -> r2::R2<T> {
        r2::R2::<T>::new()
    }
}
/// Factory methods producing clustering metric instances.
impl<T: Number + Ord> ClusterMetrics<T> {
    /// Build a combined homogeneity/completeness/v-measure scorer.
    pub fn hcv_score() -> cluster_hcv::HCVScore<T> {
        cluster_hcv::HCVScore::<T>::new()
    }
}
/// Convenience wrapper: accuracy of `y_pred` against the ground truth `y_true`.
///
/// * `y_true` - ground-truth labels
/// * `y_pred` - predicted labels
pub fn accuracy<T: Number + Ord, V: ArrayView1<T>>(y_true: &V, y_pred: &V) -> f64 {
    ClassificationMetricsOrd::<T>::accuracy().get_score(y_true, y_pred)
}
/// Convenience wrapper: recall of `y_pred` against the ground truth `y_true`.
///
/// * `y_true` - ground-truth labels
/// * `y_pred` - predicted labels
pub fn recall<T: Number + RealNumber + FloatNumber, V: ArrayView1<T>>(
    y_true: &V,
    y_pred: &V,
) -> f64 {
    ClassificationMetrics::<T>::recall().get_score(y_true, y_pred)
}
/// Convenience wrapper: precision of `y_pred` against the ground truth `y_true`.
///
/// * `y_true` - ground-truth labels
/// * `y_pred` - predicted labels
pub fn precision<T: Number + RealNumber + FloatNumber, V: ArrayView1<T>>(
    y_true: &V,
    y_pred: &V,
) -> f64 {
    ClassificationMetrics::<T>::precision().get_score(y_true, y_pred)
}
/// Convenience wrapper: F-beta score of `y_pred` against `y_true`.
///
/// * `y_true` - ground-truth labels
/// * `y_pred` - predicted labels
/// * `beta` - weighting parameter forwarded to the F1 metric constructor
pub fn f1<T: Number + RealNumber + FloatNumber, V: ArrayView1<T>>(
    y_true: &V,
    y_pred: &V,
    beta: f64,
) -> f64 {
    let metric = ClassificationMetrics::<T>::f1(beta);
    metric.get_score(y_true, y_pred)
}
/// Convenience wrapper: Area Under the ROC Curve of the predicted
/// probabilities against the ground-truth labels.
///
/// * `y_true` - ground-truth labels
/// * `y_pred_probabilities` - predicted probability estimates
///
/// Fix: the original bound list `V: ArrayView1<T> + Array1<T> + Array1<T>`
/// named `Array1<T>` twice; the duplicate is removed. The effective
/// interface is identical, so all existing callers still compile.
pub fn roc_auc_score<T, V>(y_true: &V, y_pred_probabilities: &V) -> f64
where
    T: Number + RealNumber + FloatNumber + PartialOrd,
    V: ArrayView1<T> + Array1<T>,
{
    ClassificationMetrics::<T>::roc_auc_score().get_score(y_true, y_pred_probabilities)
}
/// Convenience wrapper: mean squared error of `y_pred` against `y_true`.
///
/// * `y_true` - ground-truth values
/// * `y_pred` - predicted values
pub fn mean_squared_error<T: Number + FloatNumber, V: ArrayView1<T>>(
    y_true: &V,
    y_pred: &V,
) -> f64 {
    let metric = RegressionMetrics::<T>::mean_squared_error();
    metric.get_score(y_true, y_pred)
}
/// Convenience wrapper: mean absolute error of `y_pred` against `y_true`.
///
/// * `y_true` - ground-truth values
/// * `y_pred` - predicted values
pub fn mean_absolute_error<T: Number + FloatNumber, V: ArrayView1<T>>(
    y_true: &V,
    y_pred: &V,
) -> f64 {
    let metric = RegressionMetrics::<T>::mean_absolute_error();
    metric.get_score(y_true, y_pred)
}
/// Convenience wrapper: coefficient of determination (R²) of `y_pred`
/// against `y_true`.
///
/// * `y_true` - ground-truth values
/// * `y_pred` - predicted values
pub fn r2<T: Number + FloatNumber, V: ArrayView1<T>>(y_true: &V, y_pred: &V) -> f64 {
    let metric = RegressionMetrics::<T>::r2();
    metric.get_score(y_true, y_pred)
}
/// Convenience wrapper: homogeneity score of the clustering `y_pred`
/// against the ground-truth labeling `y_true`.
///
/// * `y_true` - ground-truth cluster labels
/// * `y_pred` - predicted cluster labels
pub fn homogeneity_score<
    T: Number + FloatNumber + RealNumber + Ord,
    V: ArrayView1<T> + Array1<T>,
>(
    y_true: &V,
    y_pred: &V,
) -> f64 {
    let mut scorer = ClusterMetrics::<T>::hcv_score();
    scorer.compute(y_true, y_pred);
    // Fix: replace bare `unwrap()` with `expect` stating the invariant —
    // `compute` is presumed to populate the score; if it ever does not,
    // the panic message now points at the broken contract.
    scorer
        .homogeneity()
        .expect("HCVScore::compute should populate the homogeneity score")
}
/// Convenience wrapper: completeness score of the clustering `y_pred`
/// against the ground-truth labeling `y_true`.
///
/// * `y_true` - ground-truth cluster labels
/// * `y_pred` - predicted cluster labels
pub fn completeness_score<
    T: Number + FloatNumber + RealNumber + Ord,
    V: ArrayView1<T> + Array1<T>,
>(
    y_true: &V,
    y_pred: &V,
) -> f64 {
    let mut scorer = ClusterMetrics::<T>::hcv_score();
    scorer.compute(y_true, y_pred);
    // Fix: replace bare `unwrap()` with `expect` stating the invariant —
    // `compute` is presumed to populate the score; if it ever does not,
    // the panic message now points at the broken contract.
    scorer
        .completeness()
        .expect("HCVScore::compute should populate the completeness score")
}
/// Convenience wrapper: v-measure score of the clustering `y_pred`
/// against the ground-truth labeling `y_true`.
///
/// * `y_true` - ground-truth cluster labels
/// * `y_pred` - predicted cluster labels
pub fn v_measure_score<T: Number + FloatNumber + RealNumber + Ord, V: ArrayView1<T> + Array1<T>>(
    y_true: &V,
    y_pred: &V,
) -> f64 {
    let mut scorer = ClusterMetrics::<T>::hcv_score();
    scorer.compute(y_true, y_pred);
    // Fix: replace bare `unwrap()` with `expect` stating the invariant —
    // `compute` is presumed to populate the score; if it ever does not,
    // the panic message now points at the broken contract.
    scorer
        .v_measure()
        .expect("HCVScore::compute should populate the v-measure score")
}